Columns: hash (string, length 40) · authorName (string, 42 distinct values) · authorEmail (string, 41 distinct values) · date (timestamp[ms], 2021-07-26 09:52:55 to 2025-07-18 10:19:56) · subject (string, length 11 to 116) · diff (string, length 0 to 987k)
2994f29bb319518be2b416e8aad35b2f704a2ffb
Andrea Soria Jimenez
2023-02-02T18:18:32
Adding custom exception when cache insert fails because of too many columns (#749)
diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index 790b6d85..4d6bbe76 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -198,0 +199 @@ firstRows: + columnsMaxNumber: 1_000 diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index fd1e6b77..668a0302 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -228,0 +229,3 @@ firstRows: + + # Max number of columns in the /first-rows endpoint response + columnsMaxNumber: 1_000 diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index 56fc9d15..7fee8754 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -36,0 +37,2 @@ + - name: FIRST_ROWS_COLUMNS_MAX_NUMBER + value: {{ .Values.firstRows.columnsMaxNumber| quote }} diff --git a/chart/values.yaml b/chart/values.yaml index c00f5df0..fab77dc9 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -298,0 +299,3 @@ firstRows: + # Max number of columns in the /first-rows endpoint response + columnsMaxNumber: 1_000 + diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index 1025004d..77028267 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -145,0 +146 @@ services: + FIRST_ROWS_COLUMNS_MAX_NUMBER: ${FIRST_ROWS_COLUMNS_MAX_NUMBER-1_000} diff --git a/workers/datasets_based/README.md b/workers/datasets_based/README.md index 15d54069..786b6ced 100644 --- a/workers/datasets_based/README.md +++ b/workers/datasets_based/README.md @@ -46,0 +47 @@ Set environment variables to configure the first rows worker (`FIRST_ROWS_` pref +- `FIRST_ROWS_COLUMNS_MAX_NUMBER`: the max number of columns (features) provided in the /first-rows endpoint response. If the number of columns is greater than the limit, an error is returned. Defaults to `1_000`. 
diff --git a/workers/datasets_based/src/datasets_based/config.py b/workers/datasets_based/src/datasets_based/config.py index 3c2e8844..a69b6105 100644 --- a/workers/datasets_based/src/datasets_based/config.py +++ b/workers/datasets_based/src/datasets_based/config.py @@ -48,0 +49 @@ FIRST_ROWS_MIN_NUMBER = 10 +FIRST_ROWS_COLUMNS_MAX_NUMBER = 1_000 @@ -58,0 +60 @@ class FirstRowsConfig: + columns_max_number: int = FIRST_ROWS_COLUMNS_MAX_NUMBER @@ -72,0 +75 @@ class FirstRowsConfig: + columns_max_number=env.int(name="COLUMNS_MAX_NUMBER", default=FIRST_ROWS_COLUMNS_MAX_NUMBER), diff --git a/workers/datasets_based/src/datasets_based/workers/first_rows.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py index fc00b636..5426a2b3 100644 --- a/workers/datasets_based/src/datasets_based/workers/first_rows.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -37,0 +38,2 @@ FirstRowsWorkerErrorCode = Literal[ + "TooManyColumnsError", + "TooBigContentError", @@ -103,0 +106,14 @@ class RowsPostProcessingError(FirstRowsWorkerError): +class TooManyColumnsError(FirstRowsWorkerError): + """Raised when the dataset exceeded the max number of columns.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "TooManyColumnsError", cause, True) + + +class TooBigContentError(FirstRowsWorkerError): + """Raised when the first rows content exceeded the max size of bytes.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "TooBigContentError", cause, False) + + @@ -381,0 +398 @@ def compute_first_rows_response( + columns_max_number: int, @@ -411,0 +429,2 @@ def compute_first_rows_response( + columns_max_number (`int`): + The maximum number of columns supported. @@ -429,0 +449,4 @@ def compute_first_rows_response( + - [`~workers.first_rows.TooManyColumnsError`] + If the number of columns (features) exceeds the maximum supported number of columns. + - [`~workers.first_rows.TooBigContentError`] + If the first rows content exceeds the maximum supported size of bytes. @@ -482,0 +506,20 @@ def compute_first_rows_response( + + if features and len(features) > columns_max_number: + raise TooManyColumnsError( + f"Too many columns. The maximum supported number of columns is {columns_max_number}." 
+ ) + + # validate size of response without the rows + features_list = to_features_list(features=features) + response_features_only: FirstRowsResponse = { + "dataset": dataset, + "config": config, + "split": split, + "features": features_list, + "rows": [], + } + + surrounding_json_size = get_json_size(response_features_only) + if surrounding_json_size > rows_max_bytes: + raise TooBigContentError("The first rows content after truncation exceeds the maximum size.") + @@ -529,10 +572 @@ def compute_first_rows_response( - # get the size of the surrounding JSON (without the rows) - features_list = to_features_list(features=features) - response: FirstRowsResponse = { - "dataset": dataset, - "config": config, - "split": split, - "features": features_list, - "rows": [], - } - surrounding_json_size = get_json_size(response) + @@ -545,0 +580,2 @@ def compute_first_rows_response( + + response = response_features_only @@ -546,0 +583 @@ def compute_first_rows_response( + @@ -580,0 +618 @@ class FirstRowsWorker(DatasetsBasedWorker): + columns_max_number=self.first_rows_config.columns_max_number, diff --git a/workers/datasets_based/tests/workers/test_first_rows.py b/workers/datasets_based/tests/workers/test_first_rows.py index bc122814..d3c1e283 100644 --- a/workers/datasets_based/tests/workers/test_first_rows.py +++ b/workers/datasets_based/tests/workers/test_first_rows.py @@ -146 +146 @@ def test_number_rows( - "name,rows_max_bytes,successful_truncation", + "name,rows_max_bytes,columns_max_number,error_code", @@ -149,2 +149,3 @@ def test_number_rows( - ("public", 10, False), # too small limit, even with truncation - ("public", 1_000, True), # not truncated + ("public", 10, 1_000, "TooBigContentError"), # too small limit, even with truncation + ("public", 1_000, 1_000, None), # not truncated + ("public", 1_000, 1, "TooManyColumnsError"), # too small columns limit @@ -152,3 +153,3 @@ def test_number_rows( - ("big", 10, False), # too small limit, even with truncation - ("big", 1_000, True), # truncated successfully - ("big", 10_000_000, True), # not truncated + ("big", 10, 1_000, "TooBigContentError"), # too small limit, even with truncation + ("big", 1_000, 1_000, None), # truncated successfully + ("big", 10_000_000, 1_000, None), # not truncated @@ -163 +164,2 @@ def test_truncation( - successful_truncation: bool, + columns_max_number: int, + error_code: str, @@ -176,0 +179 @@ def test_truncation( + columns_max_number=columns_max_number, @@ -179,3 +182,8 @@ def test_truncation( - response = worker.compute() - print(get_json_size(response)) - assert (get_json_size(response) <= rows_max_bytes) is successful_truncation + + if error_code: + with pytest.raises(CustomError) as error_info: + worker.compute() + assert error_info.value.code == error_code + else: + response = worker.compute() + assert get_json_size(response) <= rows_max_bytes
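The checks added in this commit reduce to two early validations before any rows are fetched: a hard cap on the number of columns, and a test that the response skeleton (features, empty rows) already fits the byte budget, since row truncation cannot shrink it further. Below is a minimal self-contained sketch of that logic; `get_json_size` here is a simplified stand-in for the libcommon helper of the same name, and the exceptions are reduced to plain classes:

```python
import json
from typing import Any


class TooManyColumnsError(Exception):
    """Raised when the dataset exceeds the maximum supported number of columns."""


class TooBigContentError(Exception):
    """Raised when the response, even without any rows, exceeds the byte budget."""


def get_json_size(obj: Any) -> int:
    # simplified stand-in for libcommon's helper: size of the UTF-8 JSON encoding
    return len(json.dumps(obj).encode("utf-8"))


def validate_first_rows_response(
    features_list: list[Any],
    dataset: str,
    config: str,
    split: str,
    columns_max_number: int,
    rows_max_bytes: int,
) -> dict:
    if len(features_list) > columns_max_number:
        raise TooManyColumnsError(
            f"Too many columns. The maximum supported number of columns is {columns_max_number}."
        )
    # validate the size of the response without the rows: if the surrounding
    # JSON is already too big, truncating the rows can never make it fit
    response_features_only = {
        "dataset": dataset,
        "config": config,
        "split": split,
        "features": features_list,
        "rows": [],
    }
    if get_json_size(response_features_only) > rows_max_bytes:
        raise TooBigContentError("The first rows content after truncation exceeds the maximum size.")
    return response_features_only
```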
3bd06ab6266494bd747d13057a58aa4c42b196c2
Sylvain Lesage
2023-02-02T12:55:06
update the logic to skip a job (#761)
diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py index 670d2030..d55f8d2e 100644 --- a/workers/datasets_based/src/datasets_based/worker.py +++ b/workers/datasets_based/src/datasets_based/worker.py @@ -14,0 +15 @@ from libcommon.simple_cache import ( + DoesNotExist, @@ -30,0 +32,3 @@ WorkerErrorCode = Literal[ +# List of error codes that should trigger a retry. +ERROR_CODES_TO_RETRY: list[str] = ["ClientConnectionError"] + @@ -220,0 +225,2 @@ class Worker(ABC): + # TODO: set the git revision as part of the job_info -> no need to get info from the Hub + # if None: run the job @@ -227,3 +233,4 @@ class Worker(ABC): - - and the result was successful - - and it has been created with the same major version of the worker - - and it has been created with the exact same git commit of the dataset repository + - and we can get the git commit and it's not None + - and the cached entry has been created with the same git commit of the dataset repository + - and the cached entry has been created with the same major version of the worker + - and the cached entry, if an error, is not among the list of errors that should trigger a retry @@ -239,0 +247,13 @@ class Worker(ABC): + except DoesNotExist: + # no entry in the cache + return False + if cached_response["error_code"] in ERROR_CODES_TO_RETRY: + # the cache entry result was a temporary error - we process it + return False + if ( + cached_response["worker_version"] is None + or self.compare_major_version(cached_response["worker_version"]) != 0 + ): + # no worker version in the cache, or the worker has been updated - we process the job to update the cache + return False + try: @@ -241,8 +260,0 @@ class Worker(ABC): - return ( - # TODO: use "error_code" to decide if the job should be skipped (ex: retry if temporary error) - cached_response["http_status"] == HTTPStatus.OK - and cached_response["worker_version"] is not None - and self.compare_major_version(cached_response["worker_version"]) == 0 - and cached_response["dataset_git_revision"] is not None - and cached_response["dataset_git_revision"] == dataset_git_revision - ) @@ -249,0 +262,2 @@ class Worker(ABC): + # an exception occurred while getting the git revision from the Hub - the job will fail anyway, but we + # process it to store the error in the cache @@ -250,0 +265,2 @@ class Worker(ABC): + return dataset_git_revision is not None and cached_response["dataset_git_revision"] == dataset_git_revision + # skip if the git revision has not changed diff --git a/workers/datasets_based/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py index 86c74a8e..ceb3913f 100644 --- a/workers/datasets_based/tests/test_worker.py +++ b/workers/datasets_based/tests/test_worker.py @@ -0,0 +1,2 @@ +from dataclasses import dataclass +from http import HTTPStatus @@ -7 +9 @@ from libcommon.queue import Priority, Queue, Status -from libcommon.simple_cache import SplitFullName +from libcommon.simple_cache import SplitFullName, upsert_response @@ -10 +12 @@ from datasets_based.config import AppConfig -from datasets_based.worker import Worker +from datasets_based.worker import ERROR_CODES_TO_RETRY, Worker @@ -21,0 +24,4 @@ class DummyWorker(Worker): + return DummyWorker._get_dataset_git_revision() + + @staticmethod + def _get_dataset_git_revision() -> Optional[str]: @@ -78,0 +85,90 @@ def test_compare_major_version( +@dataclass +class CacheEntry: + error_code: Optional[str] + worker_version: Optional[str] + dataset_git_revision: Optional[str] + 
+ +# .get_version() [email protected]( + "force,cache_entry,expected_skip", + [ + ( + False, + CacheEntry( + error_code="DoNotRetry", # an error that we don't want to retry + worker_version=DummyWorker.get_version(), + dataset_git_revision=DummyWorker._get_dataset_git_revision(), + ), + True, # skip + ), + ( + False, + CacheEntry( + error_code=None, # no error + worker_version=DummyWorker.get_version(), + dataset_git_revision=DummyWorker._get_dataset_git_revision(), + ), + True, # skip + ), + ( + True, # force + CacheEntry( + error_code="DoNotRetry", + worker_version=DummyWorker.get_version(), + dataset_git_revision=DummyWorker._get_dataset_git_revision(), + ), + False, # process + ), + ( + False, + None, # no cache entry + False, # process + ), + ( + False, + CacheEntry( + error_code=ERROR_CODES_TO_RETRY[0], # an error that we want to retry + worker_version=DummyWorker.get_version(), + dataset_git_revision=DummyWorker._get_dataset_git_revision(), + ), + False, # process + ), + ( + False, + CacheEntry( + error_code="DoNotRetry", + worker_version=None, # no worker version + dataset_git_revision=DummyWorker._get_dataset_git_revision(), + ), + False, # process + ), + ( + False, + CacheEntry( + error_code="DoNotRetry", + worker_version="0.0.1", # a different worker version + dataset_git_revision=DummyWorker._get_dataset_git_revision(), + ), + False, # process + ), + ( + False, + CacheEntry( + error_code="DoNotRetry", + worker_version=DummyWorker.get_version(), + dataset_git_revision=None, # no dataset git revision + ), + False, # process + ), + ( + False, + CacheEntry( + error_code="DoNotRetry", + worker_version=DummyWorker.get_version(), + dataset_git_revision="different", # a different dataset git revision + ), + False, # process + ), + ], +) @@ -80 +176 @@ def test_should_skip_job( - test_processing_step: ProcessingStep, + test_processing_step: ProcessingStep, force: bool, cache_entry: Optional[CacheEntry], expected_skip: bool @@ -86 +181,0 @@ def test_should_skip_job( - force = False @@ -100,4 +195,14 @@ def test_should_skip_job( - assert worker.should_skip_job() is False - # we add an entry to the cache - worker.process() - assert worker.should_skip_job() is True + if cache_entry: + upsert_response( + kind=test_processing_step.cache_kind, + dataset=dataset, + config=config, + split=split, + content={}, + http_status=HTTPStatus.OK, # <- not important + error_code=cache_entry.error_code, + details=None, + worker_version=cache_entry.worker_version, + dataset_git_revision=cache_entry.dataset_git_revision, + ) + assert worker.should_skip_job() is expected_skip
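The reworked skip logic is easier to read as a chain of early returns, one per reason to re-process the job. A condensed sketch, modeling the cache entry as a plain dict and using a simplified major-version comparison in place of `compare_major_version`:

```python
from typing import Any, Optional

# error codes that denote temporary failures, so the job should be retried
ERROR_CODES_TO_RETRY: list[str] = ["ClientConnectionError"]


def should_skip_job(
    cached_entry: Optional[dict[str, Any]],
    worker_major_version: int,
    dataset_git_revision: Optional[str],
) -> bool:
    if cached_entry is None:
        return False  # no entry in the cache: process the job
    if cached_entry.get("error_code") in ERROR_CODES_TO_RETRY:
        return False  # the cached result was a temporary error: retry
    worker_version = cached_entry.get("worker_version")
    if worker_version is None or int(worker_version.split(".")[0]) != worker_major_version:
        return False  # no version, or the worker changed: refresh the cache
    if dataset_git_revision is None:
        return False  # cannot compare revisions: process (the job records its own error)
    # skip only if the dataset's git revision has not changed
    return cached_entry.get("dataset_git_revision") == dataset_git_revision
```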
89a451056b4338176b012cefa31a5d222183fa0d
Sylvain Lesage
2023-02-02T09:03:18
ci: 🎡 only run on PR and on main (#758)
diff --git a/.github/workflows/chart.yml b/.github/workflows/chart.yml index 5d6114f3..0876bcce 100644 --- a/.github/workflows/chart.yml +++ b/.github/workflows/chart.yml @@ -7,0 +8,2 @@ on: + branches: + - main @@ -20 +22 @@ jobs: - uses: actions/checkout@main + uses: actions/checkout@v3 diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index c7964fab..78089370 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -7,0 +8,2 @@ on: + branches: + - main diff --git a/.github/workflows/j-migration.yml b/.github/workflows/j-migration.yml index 358f0e3e..201eaea8 100644 --- a/.github/workflows/j-migration.yml +++ b/.github/workflows/j-migration.yml @@ -7,0 +8,2 @@ on: + branches: + - main diff --git a/.github/workflows/l-libcommon.yml b/.github/workflows/l-libcommon.yml index 094c7077..3ba6a069 100644 --- a/.github/workflows/l-libcommon.yml +++ b/.github/workflows/l-libcommon.yml @@ -7,0 +8,2 @@ on: + branches: + - main diff --git a/.github/workflows/openapi-spec.yml b/.github/workflows/openapi-spec.yml index c1feb493..2106a43e 100644 --- a/.github/workflows/openapi-spec.yml +++ b/.github/workflows/openapi-spec.yml @@ -7,0 +8,2 @@ on: + branches: + - main diff --git a/.github/workflows/s-admin.yml b/.github/workflows/s-admin.yml index 57773bfe..7dd4c411 100644 --- a/.github/workflows/s-admin.yml +++ b/.github/workflows/s-admin.yml @@ -7,0 +8,2 @@ on: + branches: + - main diff --git a/.github/workflows/s-api.yml b/.github/workflows/s-api.yml index ef8d5ff4..972d6639 100644 --- a/.github/workflows/s-api.yml +++ b/.github/workflows/s-api.yml @@ -7,0 +8,2 @@ on: + branches: + - main diff --git a/.github/workflows/w-datasets_based.yml b/.github/workflows/w-datasets_based.yml index a454793f..2082de4a 100644 --- a/.github/workflows/w-datasets_based.yml +++ b/.github/workflows/w-datasets_based.yml @@ -7,0 +8,2 @@ on: + branches: + - main
a968dc5a290cce58fa54536264e7362576419a0a
Sylvain Lesage
2023-02-02T09:01:17
test: 💍 ensure the database is ready in the tests (#759)
diff --git a/workers/datasets_based/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py index ac170948..86c74a8e 100644 --- a/workers/datasets_based/tests/test_worker.py +++ b/workers/datasets_based/tests/test_worker.py @@ -6,2 +6,2 @@ from libcommon.processing_graph import ProcessingGraph, ProcessingStep -from libcommon.queue import Priority, Queue, Status, _clean_queue_database -from libcommon.simple_cache import SplitFullName, _clean_cache_database +from libcommon.queue import Priority, Queue, Status +from libcommon.simple_cache import SplitFullName @@ -8,0 +9 @@ from libcommon.simple_cache import SplitFullName, _clean_cache_database +from datasets_based.config import AppConfig @@ -13,3 +14,3 @@ from datasets_based.worker import Worker -def clean_mongo_database() -> None: - _clean_queue_database() - _clean_cache_database() +def prepare_and_clean_mongo(app_config: AppConfig) -> None: + # prepare the database before each test, and clean it afterwards + pass diff --git a/workers/datasets_based/tests/test_worker_loop.py b/workers/datasets_based/tests/test_worker_loop.py index 8e969c02..f233bac1 100644 --- a/workers/datasets_based/tests/test_worker_loop.py +++ b/workers/datasets_based/tests/test_worker_loop.py @@ -6,2 +6 @@ from libcommon.processing_graph import ProcessingStep -from libcommon.queue import Queue, _clean_queue_database -from libcommon.simple_cache import _clean_cache_database +from libcommon.queue import Queue @@ -8,0 +8 @@ from libcommon.simple_cache import _clean_cache_database +from datasets_based.config import AppConfig @@ -14,3 +14,3 @@ from datasets_based.worker_loop import WorkerLoop -def clean_mongo_database() -> None: - _clean_queue_database() - _clean_cache_database() +def prepare_and_clean_mongo(app_config: AppConfig) -> None: + # prepare the database before each test, and clean it afterwards + pass diff --git a/workers/datasets_based/tests/workers/test_dataset_info.py b/workers/datasets_based/tests/workers/test_dataset_info.py index d569e14d..d2546ffd 100644 --- a/workers/datasets_based/tests/workers/test_dataset_info.py +++ b/workers/datasets_based/tests/workers/test_dataset_info.py @@ -9 +9 @@ from libcommon.queue import Priority -from libcommon.simple_cache import _clean_cache_database, upsert_response +from libcommon.simple_cache import upsert_response @@ -21,2 +21,3 @@ from datasets_based.workers.dataset_info import ( -def clean_mongo_database(app_config: AppConfig) -> None: - _clean_cache_database() +def prepare_and_clean_mongo(app_config: AppConfig) -> None: + # prepare the database before each test, and clean it afterwards + pass diff --git a/workers/datasets_based/tests/workers/test_parquet.py b/workers/datasets_based/tests/workers/test_parquet.py index b157b98e..d62f678b 100644 --- a/workers/datasets_based/tests/workers/test_parquet.py +++ b/workers/datasets_based/tests/workers/test_parquet.py @@ -9 +9 @@ from libcommon.queue import Priority -from libcommon.simple_cache import _clean_cache_database, upsert_response +from libcommon.simple_cache import upsert_response @@ -21,2 +21,3 @@ from datasets_based.workers.parquet import ( -def clean_mongo_database(app_config: AppConfig) -> None: - _clean_cache_database() +def prepare_and_clean_mongo(app_config: AppConfig) -> None: + # prepare the database before each test, and clean it afterwards + pass diff --git a/workers/datasets_based/tests/workers/test_sizes.py b/workers/datasets_based/tests/workers/test_sizes.py index ec010f33..712a31f1 100644 --- 
a/workers/datasets_based/tests/workers/test_sizes.py +++ b/workers/datasets_based/tests/workers/test_sizes.py @@ -9 +9 @@ from libcommon.queue import Priority -from libcommon.simple_cache import _clean_cache_database, upsert_response +from libcommon.simple_cache import upsert_response @@ -21,2 +21,3 @@ from datasets_based.workers.sizes import ( -def clean_mongo_database(app_config: AppConfig) -> None: - _clean_cache_database() +def prepare_and_clean_mongo(app_config: AppConfig) -> None: + # prepare the database before each test, and clean it afterwards + pass
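The pattern in this commit is to stop calling the private cleanup helpers directly and instead make each test depend on `app_config`, whose fixture chain is assumed to open the MongoDB connections and clean them around every test. A minimal sketch, with a dummy `app_config` standing in for the real fixture:

```python
import pytest


@pytest.fixture
def app_config() -> object:
    # stand-in for the real AppConfig fixture; in the repository it is assumed
    # to open the queue and cache MongoDB connections when instantiated
    return object()


@pytest.fixture(autouse=True)
def prepare_and_clean_mongo(app_config: object) -> None:
    # prepare the database before each test, and clean it afterwards;
    # the work happens in app_config's own setup/teardown, not here
    pass
```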
75ae5f7e2e96738ff37358ceab8aa85b2caee206
Sylvain Lesage
2023-02-01T15:47:24
refactor: 💡 remove dead code (#757)
diff --git a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py index 18fb3126..2cccb3d8 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py @@ -424,48 +423,0 @@ class EmptyFeaturesError(Exception): -# def dataset_info_to_splits_response(dataset: str, config_infos: List[DatasetInfo]): -# split_items: List[SplitItem] = [] -# for config_info in config_infos: -# config = config_info.config_name -# if config is None: -# raise EmptyConfigNameError(f"Dataset info for dataset='{dataset}' has no config name.") -# if config_info.splits is None: -# raise EmptySplitsError(f"Dataset info for dataset='{dataset}', config='{config}' has no splits.") -# if config_info.download_size is None: -# raise EmptyDownloadSizeError( -# f"Dataset info for dataset='{dataset}', config='{config}' has no download_size." -# ) -# if config_info.features is None: -# raise EmptyFeaturesError(f"Dataset info for dataset='{dataset}', config='{config}' has no features.") -# for split_info in config_info.splits.values(): -# if not isinstance(split_info, SplitInfo): -# raise SplitInfoFormatError( -# f"Split info for dataset='{dataset}', config='{config}' has an unknown format." -# ) -# split = split_info.name -# split_items.append( -# # {'train': SplitInfo(name='train', num_bytes=148581, num_examples=569, shard_lengths=None, -# #dataset_name='csv')} -# { -# "dataset": dataset, -# "config": config, -# "split": split, -# "stats": { -# "config_download_size": config_info.download_size, -# "parquet_size": split_info.num_bytes, -# "num_examples": split_info.num_examples, -# "num_columns": len(config_info.features), -# TODO: shard? -# }, -# "links": { -# ... -# } -# } -# ) - -# # # original_size -# # # parquet_size -# # # num_rows -# # # num_columns -# # # links to: columns (features), first-rows, parquet files -# # config_info: Dict[str, DatasetInfo] = {} - -
893a70cb7201090b8c64cd127fbe029c723f2aa3
Sylvain Lesage
2023-02-01T10:09:11
remove docker-images.yaml, and fix dev.yaml (#752)
diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 69dba2fc..c7964fab 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -10 +9,0 @@ on: - - 'chart/docker-images.yaml' @@ -20 +18,0 @@ on: - - 'chart/docker-images.yaml' diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 15223062..1073d9cd 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -82,6 +82,5 @@ The following environments contain all the modules: reverse proxy, API server, a -| Environment | URL | Type | How to deploy | -| ------------------------ | ---------------------------------------------------- | ----------------- | ----------------------------------------------------------------------- | -| Production | https://datasets-server.huggingface.co | Helm / Kubernetes | `make upgrade-prod` in [chart](./chart) | -| Development | https://datasets-server.us.dev.moon.huggingface.tech | Helm / Kubernetes | `make upgrade-dev` in [chart](./chart) | -| Local from remote images | http://localhost:8100 | Docker compose | `make start-from-remote-images` (fetches docker images from Docker Hub) | -| Local build | http://localhost:8000 | Docker compose | `make start-from-local-code` (builds docker images) | +| Environment | URL | Type | How to deploy | +| ----------- | ---------------------------------------------------- | ----------------- | --------------------------------------- | +| Production | https://datasets-server.huggingface.co | Helm / Kubernetes | `make upgrade-prod` in [chart](./chart) | +| Development | https://datasets-server.us.dev.moon.huggingface.tech | Helm / Kubernetes | `make upgrade-dev` in [chart](./chart) | +| Local build | http://localhost:8100 | Docker compose | `make start` (builds docker images) | @@ -123,13 +122 @@ make e2e -We version the [libraries](./libs) as they are dependencies of the [services](./services). To update a library: - -- change the version in its pyproject.yaml file -- build with `make build` -- version the new files in `dist/` - -And then update the library version in the services that require the update, for example if the library is `libcommon`: - -``` -poetry update libcommon -``` - -If service is updated, we don't update its version in the `pyproject.yaml` file. But we have to update the [docker images file](./chart/docker-images.yaml) with the new image tag. Then the CI will test the new docker images, and we will be able to deploy them to the infrastructure. +If service is updated, we don't update its version in the `pyproject.yaml` file. But we have to update the [helm chart](./chart/) with the new image tag, corresponding to the last build docker published on docker.io by the CI. 
diff --git a/Makefile b/Makefile index 5a2af974..153acb7b 100644 --- a/Makefile +++ b/Makefile @@ -10 +9,0 @@ DOCKER_COMPOSE := ./tools/docker-compose-datasets-server.yml -# DOCKER_IMAGES := ./chart/docker-images.yaml @@ -12 +10,0 @@ DOCKER_COMPOSE := ./tools/docker-compose-datasets-server.yml -# include tools/DockerRemoteImages.mk diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml deleted file mode 100644 index e69de29b..00000000 diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index 1f0d6244..790b6d85 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -5,2 +5,42 @@ - -hostname: "datasets-server.us.dev.moon.huggingface.tech" +global: + huggingface: + imageRegistry: "" + imagePullSecrets: [] + privateHub: + enabled: false + ingress: + domain: us.dev.moon.huggingface.tech + subdomains: + datasetsServer: datasets-server + +images: + pullPolicy: IfNotPresent + pullSecrets: [] + reverseProxy: + useGlobalRegistry: false + registry: docker.io + repository: nginx + tag: "1.20" + jobs: + mongodbMigration: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-jobs-mongodb_migration + tag: sha-25ff490 + services: + admin: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-services-admin + tag: sha-25ff490 + api: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-services-api + tag: sha-25ff490 + workers: + datasetsBased: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-workers-datasets_based + tag: sha-25ff490 @@ -23 +62,0 @@ persistence: - @@ -30,0 +70,2 @@ common: + # URL of the HuggingFace Hub + hfEndpoint: "https://huggingface.co" @@ -35,0 +77,6 @@ common: +mongodbMigration: + resources: + requests: + cpu: 100m + limits: + cpu: 1 @@ -43 +90,2 @@ storageAdmin: - cpu: 0.01 + cpu: 100m + memory: "256Mi" @@ -45,0 +94 @@ storageAdmin: + memory: "256Mi" @@ -53 +102,2 @@ reverseProxy: - cpu: 0.01 + cpu: 100m + memory: "256Mi" @@ -55,0 +106,6 @@ reverseProxy: + memory: "256Mi" + service: + type: NodePort + tolerations: + - key: CriticalAddonsOnly + operator: Equal @@ -57,0 +114,3 @@ ingress: + tls: + - hosts: + - "datasets-server.us.dev.moon.huggingface.tech" @@ -71,0 +131,2 @@ admin: + uvicornNumWorkers: "1" + @@ -72,0 +134,2 @@ admin: + service: + type: NodePort @@ -75 +138,2 @@ admin: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -77,0 +142 @@ admin: + memory: "4Gi" @@ -79,0 +145,2 @@ api: + uvicornNumWorkers: "1" + @@ -80,0 +148,2 @@ api: + service: + type: NodePort @@ -83 +152,2 @@ api: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -85,0 +156 @@ api: + memory: "4Gi" @@ -93 +164,2 @@ configNames: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -95,0 +168 @@ configNames: + memory: "4Gi" @@ -97 +170 @@ configNames: -splitsNames: +splitNames: @@ -101 +174,2 @@ splitsNames: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -103,0 +178 @@ splitsNames: + memory: "4Gi" @@ -109 +184,2 @@ splits: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -111,0 +188 @@ splits: + memory: "4Gi" @@ -117 +194,2 @@ firstRows: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -119,0 +198 @@ firstRows: + memory: "4Gi" @@ -121,0 +201 @@ parquetAndDatasetInfo: + @@ -127 +207,2 @@ parquetAndDatasetInfo: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -129,0 +211 @@ parquetAndDatasetInfo: + memory: "4Gi" @@ -135 +217,2 @@ parquet: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -137,0 +221 @@ parquet: + memory: "4Gi" @@ -143 +227,2 @@ datasetInfo: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -145,0 +231 @@ 
datasetInfo: + memory: "4Gi" @@ -151 +237,2 @@ sizes: - cpu: 0.01 + cpu: 100m + memory: "512Mi" @@ -153,0 +241 @@ sizes: + memory: "4Gi" diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index d1d1f55d..fd1e6b77 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -4,38 +4 @@ -## Production -# -# datasets-server is installed on a [kubernetes cluster](https://us-east-1.console.aws.amazon.com/eks/home?region=us-east-1#/clusters) -# -# Grafana: -# -# - https://grafana.huggingface.tech/d/SaHl2KX7z/datasets-server-admin-cache-and-queue -# - https://grafana.huggingface.tech/d/iPuzZbrnk/datasets-server-api-routes -# - https://grafana.huggingface.tech/d/85a562078cdf77779eaa1add43ccec1e/kubernetes-compute-resources-namespace-pods?var-datasource=Prometheus%20EKS%20Hub%20Prod&var-namespace=datasets-server -# -# BetterUptime: -# -# - https://betteruptime.com/team/14149/monitors/389098 -# - https://betteruptime.com/team/14149/monitors/691070 -# -# resources for the prod namespace are defined here: https://us-east-1.console.aws.amazon.com/eks/home?region=us-east-1#/clusters/hub-prod/nodegroups/datasets-server-20220513085103612000000001 -# the nodes are up to 20 t3.2xlarge instances (8 vCPUs, 32 GiB), with autoscale -# (see https://github.com/huggingface/infra/pull/239/files) -# this means that we can get up to: -# 160 vCPUs and 640 GiB RAM are available (but no more than 8 cpus or 32 GiB for each pod) -# -# the max resources (limits) per deployment are: -# - reverse-proxy: 2 pods -> 2 CPUs, 512MiB -# - api: 4 pods -> 4 CPUs, 4 GiB -# - admin: 1 pod -> 1 CPU -# and for the workers: -# - splits: 1 CPUs, 30 GiB -# - firstRows: 1 CPUs, 30 GiB -# We set the requested RAM to 8 GiB per worker, in order to trigger the autoscale. We should be able to -# launch 3 worker pods per node, taking the sidecars into account, it means 60 pods -# -# Being optimistic about not all the pods having to increase their memory usage to 30 GiB at the same time, -# ie over-committing a bit, we can set up to 60 workers (dataset + split). -# -# For now, we have to scale manually with: -# kubectl scale --replicas=16 deploy/datasets-server-prod-worker-splits -# or -# kubectl scale --replicas=32 deploy/datasets-server-prod-worker-first-rows +# --- common parameters --- @@ -88,2 +50,0 @@ persistence: - # https://us-east-1.console.aws.amazon.com/fsx/home?region=us-east-1#file-system-details/fs-02050b8d555063cde - # Alarm: https://us-east-1.console.aws.amazon.com/cloudwatch/home?region=us-east-1#alarmsV2:alarm/Low+disk+on+datasets+server? 
@@ -96 +56,0 @@ mongodb: - # we use the secret instead to get the mongo URL @@ -145,7 +104,0 @@ reverseProxy: - annotations: - service.beta.kubernetes.io/aws-load-balancer-additional-resource-tags: Env=prod,Project=datasets-server,Terraform=true - service.beta.kubernetes.io/aws-load-balancer-name: hub-prod-datasets-server-nlb - service.beta.kubernetes.io/aws-load-balancer-nlb-target-type: instance - service.beta.kubernetes.io/aws-load-balancer-scheme: internal - service.beta.kubernetes.io/aws-load-balancer-type: external - service.beta.kubernetes.io/aws-load-balancer-target-node-labels: role-datasets-server=true @@ -309 +261,0 @@ parquetAndDatasetInfo: - tolerations: [] @@ -326 +277,0 @@ parquet: - tolerations: [] @@ -343 +293,0 @@ datasetInfo: - tolerations: [] @@ -360 +309,0 @@ sizes: - tolerations: [] diff --git a/chart/values.yaml b/chart/values.yaml index 99045b03..c00f5df0 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -87,12 +86,0 @@ mongodb: -# TODO : Update Makefile script to update this values -# overridden by docker-images.yaml (which must be in JSON format!). See Makefile for details. -dockerImage: - reverseProxy: "" - jobs: - mongodb_migration: "" - services: - admin: "" - api: "" - workers: - datasets_based: "" - diff --git a/e2e/Makefile b/e2e/Makefile index b12d45d5..97ec2bee 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -19 +18,0 @@ DOCKER_COMPOSE := ../tools/docker-compose-datasets-server.yml -# DOCKER_IMAGES := ../chart/docker-images.yaml @@ -23 +21,0 @@ include ../tools/PythonTest.mk -# include ../tools/DockerRemoteImages.mk diff --git a/tools/DockerRemoteImages.mk b/tools/DockerRemoteImages.mk deleted file mode 100644 index 232fb14d..00000000 --- a/tools/DockerRemoteImages.mk +++ /dev/null @@ -1,4 +0,0 @@ -export IMAGE_REVERSE_PROXY := $(shell jq -r '.dockerImage.reverseProxy' ${DOCKER_IMAGES}) -export IMAGE_SERVICE_ADMIN := $(shell jq -r '.dockerImage.services.admin' ${DOCKER_IMAGES}) -export IMAGE_SERVICE_API := $(shell jq -r '.dockerImage.services.api' ${DOCKER_IMAGES}) -export IMAGE_WORKER_DATASETS_BASED := $(shell jq -r '.dockerImage.workers.datasets_based' ${DOCKER_IMAGES})
223753919d31ef764a64d851f415adb5080c0f2b
Sylvain Lesage
2023-01-31T16:24:59
feat: 🎸 update docker images (#748)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index d1dd2901..d1d1f55d 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -56 +56 @@ images: - tag: sha-2f38593 + tag: sha-25ff490 @@ -62 +62 @@ images: - tag: sha-2f38593 + tag: sha-25ff490 @@ -67 +67 @@ images: - tag: sha-2f38593 + tag: sha-25ff490 @@ -73 +73 @@ images: - tag: sha-2f38593 + tag: sha-25ff490 diff --git a/chart/values.yaml b/chart/values.yaml index 2ac6369b..99045b03 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -25 +25 @@ images: - tag: sha-2f38593 + tag: sha-25ff490 @@ -31 +31 @@ images: - tag: sha-2f38593 + tag: sha-25ff490 @@ -36 +36 @@ images: - tag: sha-2f38593 + tag: sha-25ff490 @@ -42 +42 @@ images: - tag: sha-2f38593 + tag: sha-25ff490
25ff4909881ddbb6fa212a12313fca6b931aabbf
Quentin Lhoest
2023-01-31T15:40:00
add HF_TOKEN env var for admin ui (#746)
diff --git a/front/admin_ui/app.py b/front/admin_ui/app.py index 23aaf1e4..571d717b 100644 --- a/front/admin_ui/app.py +++ b/front/admin_ui/app.py @@ -17,0 +18 @@ ADMIN_HF_ORGANIZATION = os.environ.get("ADMIN_HF_ORGANIZATION", "huggingface") +HF_TOKEN = os.environ.get("HF_TOKEN") @@ -32 +32,0 @@ def healthcheck(): - @@ -37 +37 @@ with gr.Blocks() as demo: - with gr.Row() as auth_page: + with gr.Row(visible=HF_TOKEN is None) as auth_page: @@ -43 +43 @@ with gr.Blocks() as demo: - with gr.Row(visible=False) as main_page: + with gr.Row(visible=HF_TOKEN is not None) as main_page: @@ -45 +45 @@ with gr.Blocks() as demo: - welcome_title = gr.Markdown("") + welcome_title = gr.Markdown("### Welcome") @@ -77 +77,3 @@ with gr.Blocks() as demo: - return f"❌ Unauthorized (user '{user['name']} is not a member of '{ADMIN_HF_ORGANIZATION}')" + return { + auth_error: gr.update(value=f"❌ Unauthorized (user '{user['name']} is not a member of '{ADMIN_HF_ORGANIZATION}')") + } @@ -79,0 +82 @@ with gr.Blocks() as demo: + token = token or HF_TOKEN
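Part of this fix addresses a Gradio pitfall: when an event handler declares several outputs, every return path should produce updates for them, e.g. a dict mapping components to `gr.update(...)`; the unauthorized branch previously returned a bare string. A minimal sketch of the corrected pattern, using a hypothetical two-component layout:

```python
import gradio as gr

with gr.Blocks() as demo:
    token_box = gr.Textbox(label="token", type="password")
    auth_error = gr.Markdown("", visible=False)
    welcome_title = gr.Markdown("", visible=False)

    def auth(token: str):
        # every branch returns a dict of component -> update, matching `outputs`
        if not token:
            return {auth_error: gr.update(value="❌ Unauthorized", visible=True)}
        return {
            auth_error: gr.update(visible=False),
            welcome_title: gr.update(value="### Welcome", visible=True),
        }

    token_box.change(auth, inputs=token_box, outputs=[auth_error, welcome_title])

if __name__ == "__main__":
    demo.launch()
```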
af18ee98b448d9b4b1d30206f426f80b40644651
Sylvain Lesage
2023-01-31T15:36:42
fix: 🐛 fix the migration scripts to be able to run on new base (#747)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 7add4758..d1dd2901 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -52,5 +52,5 @@ images: - # mongodbMigration: - # registry: huggingface - # useGlobalRegistry: false - # repository: datasets-server-jobs-mongodb_migration - # tag: sha-2f38593 + mongodbMigration: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-jobs-mongodb_migration + tag: sha-2f38593 diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py index 74d59e4f..549b61d4 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py +++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py @@ -4 +3,0 @@ -import enum @@ -6,2 +4,0 @@ import logging -import types -from typing import Generic, Type, TypeVar @@ -9 +6 @@ from typing import Generic, Type, TypeVar -from mongoengine import Document +from libcommon.queue import Job @@ -11,2 +7,0 @@ from mongoengine.connection import get_db -from mongoengine.fields import BooleanField, DateTimeField, EnumField, StringField -from mongoengine.queryset.queryset import QuerySet @@ -22 +17 @@ class MigrationAddForceToJob(Migration): - logging.info("Add the force field, with the default value (False), to all the jobs") + logging.info("If missing, add the force field with the default value (False) to the jobs") @@ -24 +19 @@ class MigrationAddForceToJob(Migration): - db["jobsBlue"].update_many({}, {"$set": {"force": False}}) + db["jobsBlue"].update_many({"force": {"$exists": False}}, {"$set": {"force": False}}) @@ -32 +27 @@ class MigrationAddForceToJob(Migration): - logging.info("Ensure that a random selection of jobs have the 'force' field set to False") + logging.info("Ensure that a random selection of jobs have the 'force' field") @@ -34,83 +29 @@ class MigrationAddForceToJob(Migration): - def custom_validation(doc: JobSnapshot) -> None: - if doc.force: - raise ValueError("force should be False") - - check_documents(DocCls=JobSnapshot, sample_size=10, custom_validation=custom_validation) - if JobSnapshot.objects(force=False).count() != JobSnapshot.objects.count(): - raise ValueError('All the objects should have the "force" field, set to False') - - -# --- JobSnapshot --- -# copied from libcommon.queue.Job, as a snapshot of when the migration was created -class Status(enum.Enum): - WAITING = "waiting" - STARTED = "started" - SUCCESS = "success" - ERROR = "error" - CANCELLED = "cancelled" - SKIPPED = "skipped" - - -# START monkey patching ### hack ### -# see https://github.com/sbdchd/mongo-types#install -U = TypeVar("U", bound=Document) - - -def no_op(self, x): # type: ignore - return self - - -QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet) - - -class QuerySetManager(Generic[U]): - def __get__(self, instance: object, cls: Type[U]) -> QuerySet[U]: - return QuerySet(cls, cls._get_collection()) - - -# END monkey patching ### hack ### - - -class JobSnapshot(Document): - """A job in the mongoDB database - - Args: - type (`str`): The type of the job, identifies the queue - dataset (`str`): The dataset on which to apply the job. - config (`str`, optional): The config on which to apply the job. - split (`str`, optional): The config on which to apply the job. - unicity_id (`str`): A string that identifies the job uniquely. 
Only one job with the same unicity_id can be in - the started state. - namespace (`str`): The dataset namespace (user or organization) if any, else the dataset name (canonical name). - force (`bool`, optional): If True, the job SHOULD not be skipped. Defaults to False. - status (`Status`, optional): The status of the job. Defaults to Status.WAITING. - created_at (`datetime`): The creation date of the job. - started_at (`datetime`, optional): When the job has started. - finished_at (`datetime`, optional): When the job has finished. - """ - - meta = { - "collection": "jobsBlue", - "db_alias": "queue", - "indexes": [ - "status", - ("type", "status"), - ("type", "dataset", "status"), - ("type", "dataset", "config", "split", "status"), - ("status", "type", "created_at", "namespace"), - "-created_at", - ], - } - type = StringField(required=True) - dataset = StringField(required=True) - config = StringField() - split = StringField() - unicity_id = StringField(required=True) - namespace = StringField(required=True) - force = BooleanField(default=False) - status = EnumField(Status, default=Status.WAITING) - created_at = DateTimeField(required=True) - started_at = DateTimeField() - finished_at = DateTimeField() - - objects = QuerySetManager["JobSnapshot"]() + check_documents(DocCls=Job, sample_size=10) diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221117223000_cache_generic_response.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221117223000_cache_generic_response.py index 273c946c..46cc56f5 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221117223000_cache_generic_response.py +++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221117223000_cache_generic_response.py @@ -3,0 +4 @@ +import contextlib @@ -5,5 +5,0 @@ import logging -import types -from datetime import datetime, timezone -from enum import Enum -from http import HTTPStatus -from typing import Generic, Type, TypeVar @@ -11,2 +7 @@ from typing import Generic, Type, TypeVar -from bson import ObjectId -from mongoengine import Document +from libcommon.simple_cache import CachedResponse @@ -14,8 +9 @@ from mongoengine.connection import get_db -from mongoengine.fields import ( - DateTimeField, - DictField, - EnumField, - ObjectIdField, - StringField, -) -from mongoengine.queryset.queryset import QuerySet +from pymongo.errors import InvalidName @@ -26,6 +13,0 @@ from mongodb_migration.migration import Migration - -class CacheKind(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" - - @@ -35,0 +18,2 @@ cachedResponseCollection = "cachedResponsesBlue" +SPLITS_KIND = "/splits" +FIRST_ROWS_KIND = "/first-rows" @@ -48,48 +32,50 @@ class MigrationMoveToGenericCachedResponse(Migration): - for splits_response in db[splitsResponseCollection].find(): - if not isinstance(splits_response, dict): - # for mypy - raise ValueError("splits_response should be a dict") - db[cachedResponseCollection].insert_one( - { - "_id": splits_response.get("_id"), - "kind": CacheKind.SPLITS.value, - # ^ "kind" is a new field - "dataset": splits_response.get("dataset_name"), - "config": None, - "split": None, - # ^ "config" and "split" are None for kind=/splits - "http_status": splits_response.get("http_status"), - "error_code": splits_response.get("error_code"), - "content": splits_response.get("response"), - # ^ "response" field has been renamed to "content" - "worker_version": splits_response.get("worker_version"), - "dataset_git_revision": splits_response.get("dataset_git_revision"), - 
"details": splits_response.get("details"), - "updated_at": splits_response.get("updated_at"), - # "stale" field is not used anymore - } - ) - for first_rows_response in db[firstRowsResponseCollection].find(): - if not isinstance(first_rows_response, dict): - # for mypy - raise ValueError("first_rows_response should be a dict") - db[cachedResponseCollection].insert_one( - { - "_id": first_rows_response.get("_id"), - "kind": CacheKind.FIRST_ROWS.value, - # ^ "kind" is a new field - "dataset": first_rows_response.get("dataset_name"), - "config": first_rows_response.get("config_name"), - "split": first_rows_response.get("split_name"), - # ^ "config" and "split" are None for kind=/splits - "http_status": first_rows_response.get("http_status"), - "error_code": first_rows_response.get("error_code"), - "content": first_rows_response.get("response"), - # ^ "response" field has been renamed to "content" - "worker_version": first_rows_response.get("worker_version"), - "dataset_git_revision": first_rows_response.get("dataset_git_revision"), - "details": first_rows_response.get("details"), - "updated_at": first_rows_response.get("updated_at"), - # "stale" field is not used anymore - } - ) + with contextlib.suppress(InvalidName): + for splits_response in db[splitsResponseCollection].find(): + if not isinstance(splits_response, dict): + # for mypy + raise ValueError("splits_response should be a dict") + db[cachedResponseCollection].insert_one( + { + "_id": splits_response.get("_id"), + "kind": SPLITS_KIND, + # ^ "kind" is a new field + "dataset": splits_response.get("dataset_name"), + "config": None, + "split": None, + # ^ "config" and "split" are None for kind=/splits + "http_status": splits_response.get("http_status"), + "error_code": splits_response.get("error_code"), + "content": splits_response.get("response"), + # ^ "response" field has been renamed to "content" + "worker_version": splits_response.get("worker_version"), + "dataset_git_revision": splits_response.get("dataset_git_revision"), + "details": splits_response.get("details"), + "updated_at": splits_response.get("updated_at"), + # "stale" field is not used anymore + } + ) + with contextlib.suppress(InvalidName): + for first_rows_response in db[firstRowsResponseCollection].find(): + if not isinstance(first_rows_response, dict): + # for mypy + raise ValueError("first_rows_response should be a dict") + db[cachedResponseCollection].insert_one( + { + "_id": first_rows_response.get("_id"), + "kind": FIRST_ROWS_KIND, + # ^ "kind" is a new field + "dataset": first_rows_response.get("dataset_name"), + "config": first_rows_response.get("config_name"), + "split": first_rows_response.get("split_name"), + # ^ "config" and "split" are None for kind=/splits + "http_status": first_rows_response.get("http_status"), + "error_code": first_rows_response.get("error_code"), + "content": first_rows_response.get("response"), + # ^ "response" field has been renamed to "content" + "worker_version": first_rows_response.get("worker_version"), + "dataset_git_revision": first_rows_response.get("dataset_git_revision"), + "details": first_rows_response.get("details"), + "updated_at": first_rows_response.get("updated_at"), + # "stale" field is not used anymore + } + ) @@ -103 +89,2 @@ class MigrationMoveToGenericCachedResponse(Migration): - db[cachedResponseCollection].drop() + with contextlib.suppress(InvalidName): + db[cachedResponseCollection].drop() @@ -108,5 +95 @@ class MigrationMoveToGenericCachedResponse(Migration): - def custom_validation(doc: 
CachedResponseSnapshot) -> None: - if doc.kind not in (CacheKind.SPLITS.value, CacheKind.FIRST_ROWS.value): - raise ValueError("kind should be /splits or /first-rows") - - check_documents(DocCls=CachedResponseSnapshot, sample_size=10, custom_validation=custom_validation) + check_documents(DocCls=CachedResponse, sample_size=10) @@ -115,3 +98,9 @@ class MigrationMoveToGenericCachedResponse(Migration): - splits_responses_count = db[splitsResponseCollection].count_documents({}) - first_rows_responses_count = db[firstRowsResponseCollection].count_documents({}) - cached_responses_count = CachedResponseSnapshot.objects.count() + try: + splits_responses_count = db[splitsResponseCollection].count_documents({}) + except InvalidName: + splits_responses_count = 0 + try: + first_rows_responses_count = db[firstRowsResponseCollection].count_documents({}) + except InvalidName: + first_rows_responses_count = 0 + cached_responses_count = CachedResponse.objects.count() @@ -123,82 +111,0 @@ class MigrationMoveToGenericCachedResponse(Migration): - - -# --- CachedResponseSnapshot --- - -# START monkey patching ### hack ### -# see https://github.com/sbdchd/mongo-types#install -U = TypeVar("U", bound=Document) - - -def no_op(self, x): # type: ignore - return self - - -QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet) - - -class QuerySetManager(Generic[U]): - def __get__(self, instance: object, cls: Type[U]) -> QuerySet[U]: - return QuerySet(cls, cls._get_collection()) - - -# END monkey patching ### hack ### - - -def get_datetime() -> datetime: - return datetime.now(timezone.utc) - - -# cache of any endpoint -class CachedResponseSnapshot(Document): - """A response to an endpoint request, cached in the mongoDB database - - Args: - kind (`str`): The kind of the cached response, identifies the endpoint - dataset (`str`): The requested dataset. - config (`str`, optional): The requested config, if any. - split (`str`, optional): The requested split, if any. - http_status (`HTTPStatus`): The HTTP status code. - error_code (`str`, optional): The error code, if any. - content (`dict`): The content of the cached response. Can be an error or a valid content. - details (`dict`, optional): Additional details, eg. a detailed error that we don't want to send as a response. - updated_at (`datetime`): When the cache entry has been last updated. - worker_version (`str`): The semver version of the worker that cached the response. - dataset_git_revision (`str`): The commit (of the git dataset repo) used to generate the response. 
- """ - - id = ObjectIdField(db_field="_id", primary_key=True, default=ObjectId) - - kind = StringField(required=True, unique_with=["dataset", "config", "split"]) - dataset = StringField(required=True) - config = StringField() - split = StringField() - - http_status = EnumField(HTTPStatus, required=True) - error_code = StringField() - content = DictField(required=True) - worker_version = StringField() - dataset_git_revision = StringField() - - details = DictField() - updated_at = DateTimeField(default=get_datetime) - - meta = { - "collection": cachedResponseCollection, - "db_alias": db_name, - "indexes": [ - ("kind", "dataset", "config", "split"), - ("dataset", "kind", "http_status"), - ("kind", "http_status", "dataset"), - ("kind", "http_status", "error_code"), - ("kind", "id"), - ], - } - objects = QuerySetManager["CachedResponseSnapshot"]() - - -# Fix issue with mongoengine: https://github.com/MongoEngine/mongoengine/issues/1242#issuecomment-810501601 -# mongoengine automatically sets "config" and "splits" as required fields, because they are listed in the unique_with -# field of the "kind" field. But it's an error, since unique indexes (which are used to enforce unique_with) accept -# null values, see https://www.mongodb.com/docs/v5.0/core/index-unique/#unique-index-and-missing-field. -CachedResponseSnapshot.config.required = False # type: ignore -CachedResponseSnapshot.split.required = False # type: ignore diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py index 43291b6f..5f3ab06b 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py +++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py @@ -4 +3,0 @@ -import enum @@ -6,2 +4,0 @@ import logging -import types -from typing import Generic, Type, TypeVar @@ -9 +6 @@ from typing import Generic, Type, TypeVar -from mongoengine import Document +from libcommon.queue import Job @@ -11,2 +7,0 @@ from mongoengine.connection import get_db -from mongoengine.fields import BooleanField, DateTimeField, EnumField, StringField -from mongoengine.queryset.queryset import QuerySet @@ -22 +17 @@ class MigrationAddPriorityToJob(Migration): - logging.info("Add the priority field, with the default value ('normal'), to all the jobs") + logging.info("If missing, add the priority field with the default value ('normal') to the jobs") @@ -24 +19 @@ class MigrationAddPriorityToJob(Migration): - db["jobsBlue"].update_many({}, {"$set": {"priority": "normal"}}) + db["jobsBlue"].update_many({"priority": {"$exists": False}}, {"$set": {"priority": "normal"}}) @@ -32 +27 @@ class MigrationAddPriorityToJob(Migration): - logging.info("Ensure that a random selection of jobs have the 'priority' field set to 'normal'") + logging.info("Ensure that a random selection of jobs have the 'priority' field set") @@ -34,90 +29 @@ class MigrationAddPriorityToJob(Migration): - def custom_validation(doc: JobSnapshot) -> None: - if doc.priority != Priority.NORMAL: - raise ValueError("priority should be 'normal'") - - check_documents(DocCls=JobSnapshot, sample_size=10, custom_validation=custom_validation) - if JobSnapshot.objects(priority=Priority.NORMAL).count() != JobSnapshot.objects.count(): - raise ValueError('All the objects should have the "priority" field, set to "normal"') - - -# --- JobSnapshot --- -# copied from 
libcommon.queue.Job, as a snapshot of when the migration was created -class Status(enum.Enum): - WAITING = "waiting" - STARTED = "started" - SUCCESS = "success" - ERROR = "error" - CANCELLED = "cancelled" - SKIPPED = "skipped" - - -class Priority(enum.Enum): - NORMAL = "normal" - LOW = "low" - - -# START monkey patching ### hack ### -# see https://github.com/sbdchd/mongo-types#install -U = TypeVar("U", bound=Document) - - -def no_op(self, x): # type: ignore - return self - - -QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet) - - -class QuerySetManager(Generic[U]): - def __get__(self, instance: object, cls: Type[U]) -> QuerySet[U]: - return QuerySet(cls, cls._get_collection()) - - -# END monkey patching ### hack ### - - -class JobSnapshot(Document): - """A job in the mongoDB database - - Args: - type (`str`): The type of the job, identifies the queue - dataset (`str`): The dataset on which to apply the job. - config (`str`, optional): The config on which to apply the job. - split (`str`, optional): The config on which to apply the job. - unicity_id (`str`): A string that identifies the job uniquely. Only one job with the same unicity_id can be in - the started state. - namespace (`str`): The dataset namespace (user or organization) if any, else the dataset name (canonical name). - force (`bool`, optional): If True, the job SHOULD not be skipped. Defaults to False. - priority (`Priority`, optional): The priority of the job. Defaults to Priority.NORMAL. - status (`Status`, optional): The status of the job. Defaults to Status.WAITING. - created_at (`datetime`): The creation date of the job. - started_at (`datetime`, optional): When the job has started. - finished_at (`datetime`, optional): When the job has finished. - """ - - meta = { - "collection": "jobsBlue", - "db_alias": "queue", - "indexes": [ - "status", - ("type", "status"), - ("type", "dataset", "status"), - ("type", "dataset", "config", "split", "status", "force", "priority"), - ("status", "type", "created_at", "namespace", "unicity_id", "priority"), - "-created_at", - ], - } - type = StringField(required=True) - dataset = StringField(required=True) - config = StringField() - split = StringField() - unicity_id = StringField(required=True) - namespace = StringField(required=True) - force = BooleanField(default=False) - priority = EnumField(Priority, default=Priority.NORMAL) - status = EnumField(Status, default=Status.WAITING) - created_at = DateTimeField(required=True) - started_at = DateTimeField() - finished_at = DateTimeField() - - objects = QuerySetManager["JobSnapshot"]() + check_documents(DocCls=Job, sample_size=10)
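Two patterns make these migrations runnable on a new base: update only the documents that are missing the field, so the migration is idempotent and a no-op on an empty collection, and suppress `InvalidName` where a collection may not exist yet. A minimal sketch against a pymongo `Database` handle:

```python
import contextlib

from pymongo.database import Database
from pymongo.errors import InvalidName


def up(db: Database) -> None:
    # if missing, add the force field with its default value (False) to the
    # jobs; documents that already have the field are left untouched, which
    # makes re-running the migration safe
    db["jobsBlue"].update_many({"force": {"$exists": False}}, {"$set": {"force": False}})


def down(db: Database) -> None:
    # on a brand-new base the collection may not exist; treat that as a no-op
    with contextlib.suppress(InvalidName):
        db["cachedResponsesBlue"].drop()
```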
bee1e975a1784ced71836a534415d6c9f6903cde
Quentin Lhoest
2023-01-31T15:26:36
fix admin ui requirements.txt (#742)
diff --git a/front/admin_ui/requirements.txt b/front/admin_ui/requirements.txt index e1e0d9a9..a0054b99 100644 --- a/front/admin_ui/requirements.txt +++ b/front/admin_ui/requirements.txt @@ -1,5 +1,5 @@ -gradio==~3.16.1 -matplotlib==^3.3.4 -requests==^2.26.0 -huggingface-hub==~0.12.0 -duckdb==~0.6.1 +gradio~=3.16.1 +matplotlib>=3.3.4 +requests>=2.26.0 +huggingface-hub~=0.12.0 +duckdb~=0.6.1
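The bug being fixed is a mix-up between Poetry's constraint shorthands (`^`, `~`) and the PEP 440 specifiers that `requirements.txt` understands: `gradio==~3.16.1` is not a valid specifier at all. The fix maps `~x.y.z` to the compatible-release operator `~=x.y.z` and relaxes `^x.y.z` to `>=x.y.z`. A small check using the `packaging` library (assumed installed):

```python
from packaging.specifiers import SpecifierSet

tilde = SpecifierSet("~=3.16.1")      # compatible release: >=3.16.1, <3.17.0
caret_like = SpecifierSet(">=3.3.4")  # what the fix uses in place of "^3.3.4"

print("3.16.5" in tilde)       # True
print("3.17.0" in tilde)       # False
print("3.9.1" in caret_like)   # True
```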
b08e3f0b549c79648c5d45870485209bfe02a9af
Sylvain Lesage
2023-01-31T14:53:08
fix: 🐛 disable the mongodbMigration job for now (#743)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index d1dd2901..7add4758 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -52,5 +52,5 @@ images: - mongodbMigration: - registry: huggingface - useGlobalRegistry: false - repository: datasets-server-jobs-mongodb_migration - tag: sha-2f38593 + # mongodbMigration: + # registry: huggingface + # useGlobalRegistry: false + # repository: datasets-server-jobs-mongodb_migration + # tag: sha-2f38593
9387638f786c29d2f7b8170270e3ad0fe48b3406
Quentin Lhoest
2023-01-31T14:28:54
add gradio admin ui (#732)
diff --git a/front/admin_ui/README.md b/front/admin_ui/README.md new file mode 100644 index 00000000..d655de97 --- /dev/null +++ b/front/admin_ui/README.md @@ -0,0 +1,21 @@ +## Datasets-server Admin UI + +### Setup: + +``` +poetry install +``` + +### Run: + +To connect to the PROD endpoint: + +``` +poetry run python app.py +``` + +To connect to your local DEV endpoint: + +``` +DEV=1 poetry run python app.py +``` diff --git a/front/admin_ui/app.py b/front/admin_ui/app.py new file mode 100644 index 00000000..23aaf1e4 --- /dev/null +++ b/front/admin_ui/app.py @@ -0,0 +1,121 @@ +import os + +import pandas as pd +import requests +import gradio as gr +import matplotlib.pyplot as plt +import matplotlib +import huggingface_hub as hfh +import duckdb + +matplotlib.use('SVG') + +DEV = os.environ.get("DEV", False) +HF_ENDPOINT = os.environ.get("HF_ENDPOINT", "https://huggingface.co") +PROD_DSS_ENDPOINT = os.environ.get("PROD_DSS_ENDPOINT", "https://datasets-server.huggingface.co") +DEV_DSS_ENDPOINT = os.environ.get("DEV_DSS_ENDPOINT", "http://localhost:8100") +ADMIN_HF_ORGANIZATION = os.environ.get("ADMIN_HF_ORGANIZATION", "huggingface") + +DSS_ENDPOINT = DEV_DSS_ENDPOINT if DEV else PROD_DSS_ENDPOINT + +pending_jobs_df = None + +def healthcheck(): + response = requests.head(f"{DSS_ENDPOINT}/admin/pending-jobs", timeout=10) + if response.status_code == 401: + return f"*Connected to {DSS_ENDPOINT}*" + + else: + return f"❌ Failed to connect to {DSS_ENDPOINT} (error {response.status_code})" + + + +with gr.Blocks() as demo: + gr.Markdown(" ## Datasets-server admin page") + gr.Markdown(healthcheck) + + with gr.Row() as auth_page: + with gr.Column(): + auth_title = gr.Markdown("Enter your token ([settings](https://huggingface.co/settings/tokens)):") + token_box = gr.Textbox(label="token", placeholder="hf_xxx", type="password") + auth_error = gr.Markdown("", visible=False) + + with gr.Row(visible=False) as main_page: + with gr.Column(): + welcome_title = gr.Markdown("") + with gr.Tab("View pending jobs"): + fetch_pending_jobs_button = gr.Button("Fetch pending jobs") + gr.Markdown("### Pending jobs summary") + pending_jobs_summary_table = gr.DataFrame(pd.DataFrame({"Jobs": [], "Waiting": [], "Started": []})) + gr.Markdown("### Most recent") + recent_pending_jobs_table = gr.DataFrame() + gr.Markdown("### Query the pending jobs table") + pending_jobs_query = gr.Textbox( + label="Query pending_jobs_df", + placeholder="SELECT * FROM pending_jobs_df WHERE dataset LIKE 'allenai/c4", + value="SELECT * FROM pending_jobs_df WHERE dataset LIKE 'allenai/c4'", + lines=3, + ) + query_pending_jobs_button = gr.Button("Run") + pending_jobs_query_result_df = gr.DataFrame() + + def auth(token): + if not token: + return {auth_error: gr.update(value="", visible=False)} + try: + user = hfh.whoami(token=token) + except requests.HTTPError as err: + return {auth_error: gr.update(value=f"❌ Error ({err})", visible=True)} + orgs = [org["name"] for org in user["orgs"]] + if ADMIN_HF_ORGANIZATION in orgs: + return { + auth_page: gr.update(visible=False), + welcome_title: gr.update(value=f"### Welcome {user['name']}"), + main_page: gr.update(visible=True) + } + else: + return f"❌ Unauthorized (user '{user['name']} is not a member of '{ADMIN_HF_ORGANIZATION}')" + + def view_jobs(token): + global pending_jobs_df + headers = {"Authorization": f"Bearer {token}"} + response = requests.get(f"{DSS_ENDPOINT}/admin/pending-jobs", headers=headers, timeout=60) + if response.status_code == 200: + pending_jobs = response.json() + pending_jobs_df = 
pd.DataFrame([ + job + for job_type in pending_jobs + for job_state in pending_jobs[job_type] + for job in pending_jobs[job_type][job_state] + ]) + pending_jobs_df["created_at"] = pd.to_datetime(pending_jobs_df["created_at"], errors="coerce") + return { + pending_jobs_summary_table: gr.update(visible=True, value=pd.DataFrame({ + "Jobs": list(pending_jobs), + "Waiting": [len(pending_jobs[job_type]["waiting"]) for job_type in pending_jobs], + "Started": [len(pending_jobs[job_type]["started"]) for job_type in pending_jobs], + })), + recent_pending_jobs_table: gr.update(value=pending_jobs_df.nlargest(5, "created_at")) + } + else: + return { + pending_jobs_summary_table: gr.update(visible=True, value=pd.DataFrame({"Error": [f"❌ Failed to fetch pending jobs from {DSS_ENDPOINT} (error {response.status_code})"]})), + recent_pending_jobs_table: gr.update(value=None) + } + + def query_jobs(pending_jobs_query): + global pending_jobs_df + try: + result = duckdb.query(pending_jobs_query).to_df() + except (duckdb.ParserException, duckdb.CatalogException, duckdb.BinderException) as error: + return {pending_jobs_query_result_df: gr.update(value=pd.DataFrame({"Error": [f"❌ {str(error)}"]}))} + return {pending_jobs_query_result_df: gr.update(value=result)} + + token_box.change(auth, inputs=token_box, outputs=[auth_error, welcome_title, auth_page, main_page]) + fetch_pending_jobs_button.click(view_jobs, inputs=token_box, outputs=[recent_pending_jobs_table, pending_jobs_summary_table]) + query_pending_jobs_button.click(query_jobs, inputs=pending_jobs_query, outputs=[pending_jobs_query_result_df]) + + + +if __name__ == "__main__": + demo.launch() diff --git a/front/admin_ui/poetry.lock b/front/admin_ui/poetry.lock new file mode 100644 index 00000000..49b44cd7 --- /dev/null +++ b/front/admin_ui/poetry.lock @@ -0,0 +1,1947 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "22.1.0" +description = "File support for asyncio." 
+category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "aiofiles-22.1.0-py3-none-any.whl", hash = "sha256:1142fa8e80dbae46bb6339573ad4c8c0841358f79c6eb50a493dceca14621bad"}, + {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, +] + +[[package]] +name = "aiohttp" +version = "3.7.4.post0" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.7.4.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-win32.whl", hash = "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win32.whl", hash = "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df"}, + {file = 
"aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win32.whl", hash = "sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win_amd64.whl", hash = "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win32.whl", hash = "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win_amd64.whl", hash = "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe"}, + {file = "aiohttp-3.7.4.post0.tar.gz", hash = "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf"}, +] + +[package.dependencies] +async-timeout = ">=3.0,<4.0" +attrs = ">=17.3.0" +chardet = ">=2.0,<5.0" +multidict = ">=4.5,<7.0" +typing-extensions = ">=3.6.5" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["aiodns", "brotlipy", "cchardet"] + +[[package]] +name = "altair" +version = "4.2.2" +description = "Altair: A declarative statistical visualization library for Python." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "altair-4.2.2-py3-none-any.whl", hash = "sha256:8b45ebeaf8557f2d760c5c77b79f02ae12aee7c46c27c06014febab6f849bc87"}, + {file = "altair-4.2.2.tar.gz", hash = "sha256:39399a267c49b30d102c10411e67ab26374156a84b1aeb9fcd15140429ba49c5"}, +] + +[package.dependencies] +entrypoints = "*" +jinja2 = "*" +jsonschema = ">=3.0" +numpy = "*" +pandas = ">=0.18" +toolz = "*" + +[package.extras] +dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pytest", "recommonmark", "sphinx", "vega-datasets"] + +[[package]] +name = "anyio" +version = "3.6.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16,<0.22)"] + +[[package]] +name = "async-timeout" +version = "3.0.1" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.5.3" +files = [ + {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, + {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, +] + +[[package]] +name = "attrs" +version = "22.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.0.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = 
"charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = 
"charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + 
{file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.0.7" +description = "Python library for calculating contours of 2D quadrilateral grids" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "contourpy-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:95c3acddf921944f241b6773b767f1cbce71d03307270e2d769fd584d5d1092d"}, + {file = "contourpy-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc1464c97579da9f3ab16763c32e5c5d5bb5fa1ec7ce509a4ca6108b61b84fab"}, + {file = "contourpy-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8acf74b5d383414401926c1598ed77825cd530ac7b463ebc2e4f46638f56cce6"}, + {file = "contourpy-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c71fdd8f1c0f84ffd58fca37d00ca4ebaa9e502fb49825484da075ac0b0b803"}, + {file = "contourpy-1.0.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99e9486bf1bb979d95d5cffed40689cb595abb2b841f2991fc894b3452290e8"}, + {file = "contourpy-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f4d8941a9564cda3f7fa6a6cd9b32ec575830780677932abdec7bcb61717b0"}, + {file = "contourpy-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9e20e5a1908e18aaa60d9077a6d8753090e3f85ca25da6e25d30dc0a9e84c2c6"}, + {file = "contourpy-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a877ada905f7d69b2a31796c4b66e31a8068b37aa9b78832d41c82fc3e056ddd"}, + {file = "contourpy-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6381fa66866b0ea35e15d197fc06ac3840a9b2643a6475c8fff267db8b9f1e69"}, + {file = "contourpy-1.0.7-cp310-cp310-win32.whl", hash = "sha256:3c184ad2433635f216645fdf0493011a4667e8d46b34082f5a3de702b6ec42e3"}, + {file = "contourpy-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:3caea6365b13119626ee996711ab63e0c9d7496f65641f4459c60a009a1f3e80"}, + {file = "contourpy-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed33433fc3820263a6368e532f19ddb4c5990855e4886088ad84fd7c4e561c71"}, + {file = "contourpy-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:38e2e577f0f092b8e6774459317c05a69935a1755ecfb621c0a98f0e3c09c9a5"}, + {file = "contourpy-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ae90d5a8590e5310c32a7630b4b8618cef7563cebf649011da80874d0aa8f414"}, + {file = "contourpy-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130230b7e49825c98edf0b428b7aa1125503d91732735ef897786fe5452b1ec2"}, + {file = "contourpy-1.0.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58569c491e7f7e874f11519ef46737cea1d6eda1b514e4eb5ac7dab6aa864d02"}, + {file = "contourpy-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54d43960d809c4c12508a60b66cb936e7ed57d51fb5e30b513934a4a23874fae"}, + {file = "contourpy-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:152fd8f730c31fd67fe0ffebe1df38ab6a669403da93df218801a893645c6ccc"}, + {file = "contourpy-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9056c5310eb1daa33fc234ef39ebfb8c8e2533f088bbf0bc7350f70a29bde1ac"}, + {file = 
"contourpy-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a9d7587d2fdc820cc9177139b56795c39fb8560f540bba9ceea215f1f66e1566"}, + {file = "contourpy-1.0.7-cp311-cp311-win32.whl", hash = "sha256:4ee3ee247f795a69e53cd91d927146fb16c4e803c7ac86c84104940c7d2cabf0"}, + {file = "contourpy-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:5caeacc68642e5f19d707471890f037a13007feba8427eb7f2a60811a1fc1350"}, + {file = "contourpy-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd7dc0e6812b799a34f6d12fcb1000539098c249c8da54f3566c6a6461d0dbad"}, + {file = "contourpy-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0f9d350b639db6c2c233d92c7f213d94d2e444d8e8fc5ca44c9706cf72193772"}, + {file = "contourpy-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e96a08b62bb8de960d3a6afbc5ed8421bf1a2d9c85cc4ea73f4bc81b4910500f"}, + {file = "contourpy-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:031154ed61f7328ad7f97662e48660a150ef84ee1bc8876b6472af88bf5a9b98"}, + {file = "contourpy-1.0.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e9ebb4425fc1b658e13bace354c48a933b842d53c458f02c86f371cecbedecc"}, + {file = "contourpy-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efb8f6d08ca7998cf59eaf50c9d60717f29a1a0a09caa46460d33b2924839dbd"}, + {file = "contourpy-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6c180d89a28787e4b73b07e9b0e2dac7741261dbdca95f2b489c4f8f887dd810"}, + {file = "contourpy-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b8d587cc39057d0afd4166083d289bdeff221ac6d3ee5046aef2d480dc4b503c"}, + {file = "contourpy-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:769eef00437edf115e24d87f8926955f00f7704bede656ce605097584f9966dc"}, + {file = "contourpy-1.0.7-cp38-cp38-win32.whl", hash = "sha256:62398c80ef57589bdbe1eb8537127321c1abcfdf8c5f14f479dbbe27d0322e66"}, + {file = "contourpy-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:57119b0116e3f408acbdccf9eb6ef19d7fe7baf0d1e9aaa5381489bc1aa56556"}, + {file = "contourpy-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:30676ca45084ee61e9c3da589042c24a57592e375d4b138bd84d8709893a1ba4"}, + {file = "contourpy-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e927b3868bd1e12acee7cc8f3747d815b4ab3e445a28d2e5373a7f4a6e76ba1"}, + {file = "contourpy-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:366a0cf0fc079af5204801786ad7a1c007714ee3909e364dbac1729f5b0849e5"}, + {file = "contourpy-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ba9bb365446a22411f0673abf6ee1fea3b2cf47b37533b970904880ceb72f3"}, + {file = "contourpy-1.0.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b0bf0c30d432278793d2141362ac853859e87de0a7dee24a1cea35231f0d50"}, + {file = "contourpy-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7281244c99fd7c6f27c1c6bfafba878517b0b62925a09b586d88ce750a016d2"}, + {file = "contourpy-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b6d0f9e1d39dbfb3977f9dd79f156c86eb03e57a7face96f199e02b18e58d32a"}, + {file = "contourpy-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f6979d20ee5693a1057ab53e043adffa1e7418d734c1532e2d9e915b08d8ec2"}, + {file = "contourpy-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5dd34c1ae752515318224cba7fc62b53130c45ac6a1040c8b7c1a223c46e8967"}, + {file = "contourpy-1.0.7-cp39-cp39-win32.whl", hash = 
"sha256:c5210e5d5117e9aec8c47d9156d1d3835570dd909a899171b9535cb4a3f32693"}, + {file = "contourpy-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:60835badb5ed5f4e194a6f21c09283dd6e007664a86101431bf870d9e86266c4"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ce41676b3d0dd16dbcfabcc1dc46090aaf4688fd6e819ef343dbda5a57ef0161"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a011cf354107b47c58ea932d13b04d93c6d1d69b8b6dce885e642531f847566"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31a55dccc8426e71817e3fe09b37d6d48ae40aae4ecbc8c7ad59d6893569c436"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69f8ff4db108815addd900a74df665e135dbbd6547a8a69333a68e1f6e368ac2"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efe99298ba37e37787f6a2ea868265465410822f7bea163edcc1bd3903354ea9"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a1e97b86f73715e8670ef45292d7cc033548266f07d54e2183ecb3c87598888f"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc331c13902d0f50845099434cd936d49d7a2ca76cb654b39691974cb1e4812d"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24847601071f740837aefb730e01bd169fbcaa610209779a78db7ebb6e6a7051"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abf298af1e7ad44eeb93501e40eb5a67abbf93b5d90e468d01fc0c4451971afa"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:64757f6460fc55d7e16ed4f1de193f362104285c667c112b50a804d482777edd"}, + {file = "contourpy-1.0.7.tar.gz", hash = "sha256:d8165a088d31798b59e91117d1f5fc3df8168d8b48c4acc10fc0df0d0bdbcc5e"}, +] + +[package.dependencies] +numpy = ">=1.16" + +[package.extras] +bokeh = ["bokeh", "chromedriver", "selenium"] +docs = ["furo", "sphinx-copybutton"] +mypy = ["contourpy[bokeh]", "docutils-stubs", "mypy (==0.991)", "types-Pillow"] +test = ["Pillow", "matplotlib", "pytest"] +test-no-images = ["pytest"] + +[[package]] +name = "cycler" +version = "0.11.0" +description = "Composable style cycles" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, + {file = "cycler-0.11.0.tar.gz", hash = "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"}, +] + +[[package]] +name = "duckdb" +version = "0.6.1" +description = "DuckDB embedded database" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "duckdb-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e566514f9327f89264e98ac14ee7a84fbd9857328028258422c3e8375ee19d25"}, + {file = "duckdb-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b31c2883de5b19591a2852165e6b3f9821f77af649835f27bc146b26e4aa30cb"}, + {file = "duckdb-0.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:998165b2fb1f1d2b0ad742096015ea70878f7d40304643c7424c3ed3ddf07bfc"}, + {file = "duckdb-0.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3941b3a1e8a1cdb7b90ab3917b87af816e71f9692e5ada7f19b6b60969f731e5"}, + {file = "duckdb-0.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:143611bd1b7c13343f087d4d423a7a8a4f33a114c5326171e867febf3f0fcfe1"}, + {file = "duckdb-0.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:125ba45e8b08f28858f918ec9cbd3a19975e5d8d9e8275ef4ad924028a616e14"}, + {file = "duckdb-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e609a65b31c92f2f7166831f74b56f5ed54b33d8c2c4b4c3974c26fdc50464c5"}, + {file = "duckdb-0.6.1-cp310-cp310-win32.whl", hash = "sha256:b39045074fb9a3f068496475a5d627ad4fa572fa3b4980e3b479c11d0b706f2d"}, + {file = "duckdb-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:16fa96ffaa3d842a9355a633fb8bc092d119be08d4bc02013946d8594417bc14"}, + {file = "duckdb-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4bbe2f6c1b109c626f9318eee80934ad2a5b81a51409c6b5083c6c5f9bdb125"}, + {file = "duckdb-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cfea36b58928ce778d17280d4fb3bf0a2d7cff407667baedd69c5b41463ac0fd"}, + {file = "duckdb-0.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b64eb53d0d0695814bf1b65c0f91ab7ed66b515f89c88038f65ad5e0762571c"}, + {file = "duckdb-0.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35b01bc724e1933293f4c34f410d2833bfbb56d5743b515d805bbfed0651476e"}, + {file = "duckdb-0.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fec2c2466654ce786843bda2bfba71e0e4719106b41d36b17ceb1901e130aa71"}, + {file = "duckdb-0.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82cd30f5cf368658ef879b1c60276bc8650cf67cfe3dc3e3009438ba39251333"}, + {file = "duckdb-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a782bbfb7f5e97d4a9c834c9e78f023fb8b3f6687c22ca99841e6ed944b724da"}, + {file = "duckdb-0.6.1-cp311-cp311-win32.whl", hash = "sha256:e3702d4a9ade54c6403f6615a98bbec2020a76a60f5db7fcf085df1bd270e66e"}, + {file = "duckdb-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:93b074f473d68c944b0eeb2edcafd91ad11da8432b484836efaaab4e26351d48"}, + {file = "duckdb-0.6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:adae183924d6d479202c39072e37d440b511326e84525bcb7432bca85f86caba"}, + {file = "duckdb-0.6.1-cp36-cp36m-win32.whl", hash = "sha256:546a1cd17595bd1dd009daf6f36705aa6f95337154360ce44932157d353dcd80"}, + {file = "duckdb-0.6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:87b0d00eb9d1a7ebe437276203e0cdc93b4a2154ba9688c65e8d2a8735839ec6"}, + {file = "duckdb-0.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8442e074de6e1969c3d2b24363a5a6d7f866d5ac3f4e358e357495b389eff6c1"}, + {file = "duckdb-0.6.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a6bf2ae7bec803352dade14561cb0b461b2422e70f75d9f09b36ba2dad2613b"}, + {file = "duckdb-0.6.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5054792f22733f89d9cbbced2bafd8772d72d0fe77f159310221cefcf981c680"}, + {file = "duckdb-0.6.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:21cc503dffc2c68bb825e4eb3098e82f40e910b3d09e1b3b7f090d39ad53fbea"}, + {file = "duckdb-0.6.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54b3da77ad893e99c073087ff7f75a8c98154ac5139d317149f12b74367211db"}, + {file = "duckdb-0.6.1-cp37-cp37m-win32.whl", hash = "sha256:f1d709aa6a26172a3eab804b57763d5cdc1a4b785ac1fc2b09568578e52032ee"}, + {file = "duckdb-0.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f4edcaa471d791393e37f63e3c7c728fa6324e3ac7e768b9dc2ea49065cd37cc"}, + {file = "duckdb-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d218c2dd3bda51fb79e622b7b2266183ac9493834b55010aa01273fa5b7a7105"}, + {file = 
"duckdb-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c7155cb93ab432eca44b651256c359281d26d927ff43badaf1d2276dd770832"}, + {file = "duckdb-0.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0925778200090d3d5d8b6bb42b4d05d24db1e8912484ba3b7e7b7f8569f17dcb"}, + {file = "duckdb-0.6.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b544dd04bb851d08bc68b317a7683cec6091547ae75555d075f8c8a7edb626e"}, + {file = "duckdb-0.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2c37d5a0391cf3a3a66e63215968ffb78e6b84f659529fa4bd10478f6203071"}, + {file = "duckdb-0.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ce376966260eb5c351fcc6af627a979dbbcae3efeb2e70f85b23aa45a21e289d"}, + {file = "duckdb-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:73c974b09dd08dff5e8bdedba11c7d0aa0fc46ca93954ee7d19e1e18c9883ac1"}, + {file = "duckdb-0.6.1-cp38-cp38-win32.whl", hash = "sha256:bfe39ed3a03e8b1ed764f58f513b37b24afe110d245803a41655d16d391ad9f1"}, + {file = "duckdb-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:afa97d982dbe6b125631a17e222142e79bee88f7a13fc4cee92d09285e31ec83"}, + {file = "duckdb-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c35ff4b1117096ef72d101524df0079da36c3735d52fcf1d907ccffa63bd6202"}, + {file = "duckdb-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c54910fbb6de0f21d562e18a5c91540c19876db61b862fc9ffc8e31be8b3f03"}, + {file = "duckdb-0.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99a7172563a3ae67d867572ce27cf3962f58e76f491cb7f602f08c2af39213b3"}, + {file = "duckdb-0.6.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7363ffe857d00216b659116647fbf1e925cb3895699015d4a4e50b746de13041"}, + {file = "duckdb-0.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06c1cef25f896b2284ba048108f645c72fab5c54aa5a6f62f95663f44ff8a79b"}, + {file = "duckdb-0.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e92dd6aad7e8c29d002947376b6f5ce28cae29eb3b6b58a64a46cdbfc5cb7943"}, + {file = "duckdb-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b280b2d8a01ecd4fe2feab041df70233c534fafbe33a38565b52c1e017529c7"}, + {file = "duckdb-0.6.1-cp39-cp39-win32.whl", hash = "sha256:d9212d76e90b8469743924a4d22bef845be310d0d193d54ae17d9ef1f753cfa7"}, + {file = "duckdb-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:00b7be8f67ec1a8edaa8844f521267baa1a795f4c482bfad56c72c26e1862ab2"}, + {file = "duckdb-0.6.1.tar.gz", hash = "sha256:6d26e9f1afcb924a6057785e506810d48332d4764ddc4a5b414d0f2bf0cacfb4"}, +] + +[package.dependencies] +numpy = ">=1.14" + +[[package]] +name = "entrypoints" +version = "0.4" +description = "Discover and load entry points from installed packages." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, +] + +[[package]] +name = "fastapi" +version = "0.89.1" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fastapi-0.89.1-py3-none-any.whl", hash = "sha256:f9773ea22290635b2f48b4275b2bf69a8fa721fda2e38228bed47139839dc877"}, + {file = "fastapi-0.89.1.tar.gz", hash = "sha256:15d9271ee52b572a015ca2ae5c72e1ce4241dd8532a534ad4f7ec70c376a580f"}, +] + +[package.dependencies] +pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = "0.22.0" + +[package.extras] +all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.138)", "uvicorn[standard] (>=0.12.0,<0.21.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.8.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.10.0)", "coverage[toml] (>=6.5.0,<8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.138)", "sqlalchemy (>=1.3.18,<1.4.43)", "types-orjson (==3.6.2)", "types-ujson (==5.6.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] + +[[package]] +name = "ffmpy" +version = "0.3.0" +description = "A simple Python wrapper for ffmpeg" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "ffmpy-0.3.0.tar.gz", hash = "sha256:757591581eee25b4a50ac9ffb9b58035a2794533db47e0512f53fb2d7b6f9adc"}, +] + +[[package]] +name = "filelock" +version = "3.9.0" +description = "A platform independent file lock." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, +] + +[package.extras] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "fonttools" +version = "4.38.0" +description = "Tools to manipulate font files" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fonttools-4.38.0-py3-none-any.whl", hash = "sha256:820466f43c8be8c3009aef8b87e785014133508f0de64ec469e4efb643ae54fb"}, + {file = "fonttools-4.38.0.zip", hash = "sha256:2bb244009f9bf3fa100fc3ead6aeb99febe5985fa20afbfbaa2f8946c2fbdaf1"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=14.0.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "scipy"] +lxml = ["lxml (>=4.0,<5)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=14.0.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "fsspec" +version = "2023.1.0" +description = "File-system specification" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fsspec-2023.1.0-py3-none-any.whl", hash = "sha256:b833e2e541e9e8cde0ab549414187871243177feb3d344f9d27b25a93f5d8139"}, + {file = "fsspec-2023.1.0.tar.gz", hash = "sha256:fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +entrypoints = ["importlib-metadata"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "gradio" +version = "3.16.2" +description = "Python library for easily interacting with trained machine learning models" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gradio-3.16.2-py3-none-any.whl", hash = "sha256:87f48d269003ee966ef30b0a4002db22d083265e46bf6cd48872fdce665f1a4b"}, + {file = "gradio-3.16.2.tar.gz", hash = "sha256:53fa92f3c286f42904b907ae6d852f5b50be7cd6d4bbb9c375c982d0f71a5afa"}, +] + +[package.dependencies] +aiofiles = "*" +aiohttp = "*" +altair = ">=4.2.0" +fastapi = "*" +ffmpy = "*" +fsspec = "*" +httpx = "*" +jinja2 = "*" +markdown-it-py = {version = "*", extras = ["linkify", "plugins"]} +markupsafe = "*" +matplotlib = "*" +numpy = "*" +orjson = "*" +pandas = "*" +pillow = "*" +pycryptodome = "*" +pydantic = "*" +pydub = "*" +python-multipart = "*" +pyyaml = "*" +requests = "*" +typing-extensions = "*" +uvicorn = "*" +websockets = ">=10.0" + +[[package]] 
+name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "huggingface-hub" +version = "0.12.0" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "huggingface_hub-0.12.0-py3-none-any.whl", hash = "sha256:93809eabbfb2058a808bddf8b2a70f645de3f9df73ce87ddf5163d4c74b71c0c"}, + {file = "huggingface_hub-0.12.0.tar.gz", hash = "sha256:da82c9ec8f9d8f976ffd3fd8249d20bb35c2dd3145a9f7ca1106f0ebefd9afa0"}, +] + +[package.dependencies] +filelock = "*" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] +torch = 
["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "kiwisolver" +version = "1.4.4" +description = "A fast implementation of the Cassowary constraint solver" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6"}, + {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:10ee06759482c78bdb864f4109886dff7b8a56529bc1609d4f1112b93fe6423c"}, + {file = "kiwisolver-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c79ebe8f3676a4c6630fd3f777f3cfecf9289666c84e775a67d1d358578dc2e3"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbe9fa13da955feb8202e215c4018f4bb57469b1b78c7a4c5c7b93001699938"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7577c1987baa3adc4b3c62c33bd1118c3ef5c8ddef36f0f2c950ae0b199e100d"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ad8285b01b0d4695102546b342b493b3ccc6781fc28c8c6a1bb63e95d22f09"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed58b8acf29798b036d347791141767ccf65eee7f26bde03a71c944449e53de"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a68b62a02953b9841730db7797422f983935aeefceb1679f0fc85cbfbd311c32"}, + {file = "kiwisolver-1.4.4-cp310-cp310-win32.whl", hash = 
"sha256:e92a513161077b53447160b9bd8f522edfbed4bd9759e4c18ab05d7ef7e49408"}, + {file = "kiwisolver-1.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:3fe20f63c9ecee44560d0e7f116b3a747a5d7203376abeea292ab3152334d004"}, + {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ea21f66820452a3f5d1655f8704a60d66ba1191359b96541eaf457710a5fc6"}, + {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc9db8a3efb3e403e4ecc6cd9489ea2bac94244f80c78e27c31dcc00d2790ac2"}, + {file = "kiwisolver-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5b61785a9ce44e5a4b880272baa7cf6c8f48a5180c3e81c59553ba0cb0821ca"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2dbb44c3f7e6c4d3487b31037b1bdbf424d97687c1747ce4ff2895795c9bf69"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295ecd49304dcf3bfbfa45d9a081c96509e95f4b9d0eb7ee4ec0530c4a96514"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bd472dbe5e136f96a4b18f295d159d7f26fd399136f5b17b08c4e5f498cd494"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf7d9fce9bcc4752ca4a1b80aabd38f6d19009ea5cbda0e0856983cf6d0023f5"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d6601aed50c74e0ef02f4204da1816147a6d3fbdc8b3872d263338a9052c51"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:877272cf6b4b7e94c9614f9b10140e198d2186363728ed0f701c6eee1baec1da"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:db608a6757adabb32f1cfe6066e39b3706d8c3aa69bbc353a5b61edad36a5cb4"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5853eb494c71e267912275e5586fe281444eb5e722de4e131cddf9d442615626"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f0a1dbdb5ecbef0d34eb77e56fcb3e95bbd7e50835d9782a45df81cc46949750"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:283dffbf061a4ec60391d51e6155e372a1f7a4f5b15d59c8505339454f8989e4"}, + {file = "kiwisolver-1.4.4-cp311-cp311-win32.whl", hash = "sha256:d06adcfa62a4431d404c31216f0f8ac97397d799cd53800e9d3efc2fbb3cf14e"}, + {file = "kiwisolver-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e7da3fec7408813a7cebc9e4ec55afed2d0fd65c4754bc376bf03498d4e92686"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:62ac9cc684da4cf1778d07a89bf5f81b35834cb96ca523d3a7fb32509380cbf6"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41dae968a94b1ef1897cb322b39360a0812661dba7c682aa45098eb8e193dbdf"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02f79693ec433cb4b5f51694e8477ae83b3205768a6fb48ffba60549080e295b"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0611a0a2a518464c05ddd5a3a1a0e856ccc10e67079bb17f265ad19ab3c7597"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db5283d90da4174865d520e7366801a93777201e91e79bacbac6e6927cbceede"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:1041feb4cda8708ce73bb4dcb9ce1ccf49d553bf87c3954bdfa46f0c3f77252c"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-win32.whl", hash = "sha256:a553dadda40fef6bfa1456dc4be49b113aa92c2a9a9e8711e955618cd69622e3"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:03baab2d6b4a54ddbb43bba1a3a2d1627e82d205c5cf8f4c924dc49284b87166"}, + {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:841293b17ad704d70c578f1f0013c890e219952169ce8a24ebc063eecf775454"}, + {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4f270de01dd3e129a72efad823da90cc4d6aafb64c410c9033aba70db9f1ff0"}, + {file = "kiwisolver-1.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f9f39e2f049db33a908319cf46624a569b36983c7c78318e9726a4cb8923b26c"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97528e64cb9ebeff9701e7938653a9951922f2a38bd847787d4a8e498cc83ae"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d1573129aa0fd901076e2bfb4275a35f5b7aa60fbfb984499d661ec950320b0"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad881edc7ccb9d65b0224f4e4d05a1e85cf62d73aab798943df6d48ab0cd79a1"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b428ef021242344340460fa4c9185d0b1f66fbdbfecc6c63eff4b7c29fad429d"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e407cb4bd5a13984a6c2c0fe1845e4e41e96f183e5e5cd4d77a857d9693494c"}, + {file = "kiwisolver-1.4.4-cp38-cp38-win32.whl", hash = "sha256:75facbe9606748f43428fc91a43edb46c7ff68889b91fa31f53b58894503a191"}, + {file = "kiwisolver-1.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:5bce61af018b0cb2055e0e72e7d65290d822d3feee430b7b8203d8a855e78766"}, + {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8c808594c88a025d4e322d5bb549282c93c8e1ba71b790f539567932722d7bd8"}, + {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0a71d85ecdd570ded8ac3d1c0f480842f49a40beb423bb8014539a9f32a5897"}, + {file = "kiwisolver-1.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b533558eae785e33e8c148a8d9921692a9fe5aa516efbdff8606e7d87b9d5824"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:efda5fc8cc1c61e4f639b8067d118e742b812c930f708e6667a5ce0d13499e29"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7c43e1e1206cd421cd92e6b3280d4385d41d7166b3ed577ac20444b6995a445f"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8d3bd6c72b2dd9decf16ce70e20abcb3274ba01b4e1c96031e0c4067d1e7cd"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ea39b0ccc4f5d803e3337dd46bcce60b702be4d86fd0b3d7531ef10fd99a1ac"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968f44fdbf6dd757d12920d63b566eeb4d5b395fd2d00d29d7ef00a00582aac9"}, + {file = "kiwisolver-1.4.4-cp39-cp39-win32.whl", hash = "sha256:da7e547706e69e45d95e116e6939488d62174e033b763ab1496b4c29b76fabea"}, + {file = "kiwisolver-1.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:ba59c92039ec0a66103b1d5fe588fa546373587a7d68f5c96f743c3396afc04b"}, + {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:91672bacaa030f92fc2f43b620d7b337fd9a5af28b0d6ed3f77afc43c4a64b5a"}, + {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787518a6789009c159453da4d6b683f468ef7a65bbde796bcea803ccf191058d"}, + {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da152d8cdcab0e56e4f45eb08b9aea6455845ec83172092f09b0e077ece2cf7a"}, + {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ecb1fa0db7bf4cff9dac752abb19505a233c7f16684c5826d1f11ebd9472b871"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:28bc5b299f48150b5f822ce68624e445040595a4ac3d59251703779836eceff9"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:81e38381b782cc7e1e46c4e14cd997ee6040768101aefc8fa3c24a4cc58e98f8"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2a66fdfb34e05b705620dd567f5a03f239a088d5a3f321e7b6ac3239d22aa286"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:872b8ca05c40d309ed13eb2e582cab0c5a05e81e987ab9c521bf05ad1d5cf5cb"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:70e7c2e7b750585569564e2e5ca9845acfaa5da56ac46df68414f29fea97be9f"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9f85003f5dfa867e86d53fac6f7e6f30c045673fa27b603c397753bebadc3008"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e307eb9bd99801f82789b44bb45e9f541961831c7311521b13a6c85afc09767"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1792d939ec70abe76f5054d3f36ed5656021dcad1322d1cc996d4e54165cef9"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cb459eea32a4e2cf18ba5fcece2dbdf496384413bc1bae15583f19e567f3b2"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36dafec3d6d6088d34e2de6b85f9d8e2324eb734162fba59d2ba9ed7a2043d5b"}, + {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"}, +] + +[[package]] +name = "linkify-it-py" +version = "1.0.3" +description = "Links recognition library with FULL unicode support." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "linkify-it-py-1.0.3.tar.gz", hash = "sha256:2b3f168d5ce75e3a425e34b341a6b73e116b5d9ed8dbbbf5dc7456843b7ce2ee"}, + {file = "linkify_it_py-1.0.3-py3-none-any.whl", hash = "sha256:11e29f00150cddaa8f434153f103c14716e7e097a8fd372d9eb1ed06ed91524d"}, +] + +[package.dependencies] +uc-micro-py = "*" + +[package.extras] +benchmark = ["pytest", "pytest-benchmark"] +dev = ["black", "flake8", "isort", "pre-commit"] +doc = ["myst-parser", "sphinx", "sphinx-book-theme"] +test = ["coverage", "pytest", "pytest-cov"] + +[[package]] +name = "markdown-it-py" +version = "2.1.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"}, + {file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"}, +] + +[package.dependencies] +linkify-it-py = {version = ">=1.0,<2.0", optional = true, markers = "extra == \"linkify\""} +mdit-py-plugins = {version = "*", optional = true, markers = "extra == \"plugins\""} +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark (>=3.2,<4.0)"] +code-style = ["pre-commit (==2.6)"] +compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistletoe (>=0.8.1,<0.9.0)", "mistune (>=2.0.2,<2.1.0)", "panflute (>=2.1.3,<2.2.0)"] +linkify = ["linkify-it-py (>=1.0,<2.0)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.2" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = 
"MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] + +[[package]] +name = "matplotlib" +version = "3.6.3" +description = "Python plotting package" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib-3.6.3-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:80c166a0e28512e26755f69040e6bf2f946a02ffdb7c00bf6158cca3d2b146e6"}, + {file = "matplotlib-3.6.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:eb9421c403ffd387fbe729de6d9a03005bf42faba5e8432f4e51e703215b49fc"}, + {file = "matplotlib-3.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5223affa21050fb6118353c1380c15e23aedfb436bf3e162c26dc950617a7519"}, + {file = "matplotlib-3.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00c248ab6b92bea3f8148714837937053a083ff03b4c5e30ed37e28fc0e7e56"}, + {file = "matplotlib-3.6.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca94f0362f6b6f424b555b956971dcb94b12d0368a6c3e07dc7a40d32d6d873d"}, + {file = "matplotlib-3.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59400cc9451094b7f08cc3f321972e6e1db4cd37a978d4e8a12824bf7fd2f03b"}, + {file = "matplotlib-3.6.3-cp310-cp310-win32.whl", hash = "sha256:57ad1aee29043163374bfa8990e1a2a10ff72c9a1bfaa92e9c46f6ea59269121"}, + {file = "matplotlib-3.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:1fcc4cad498533d3c393a160975acc9b36ffa224d15a6b90ae579eacee5d8579"}, + {file = "matplotlib-3.6.3-cp311-cp311-macosx_10_12_universal2.whl", hash = 
"sha256:d2cfaa7fd62294d945b8843ea24228a27c8e7c5b48fa634f3c168153b825a21b"}, + {file = "matplotlib-3.6.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c3f08df2ac4636249b8bc7a85b8b82c983bef1441595936f62c2918370ca7e1d"}, + {file = "matplotlib-3.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff2aa84e74f80891e6bcf292ebb1dd57714ffbe13177642d65fee25384a30894"}, + {file = "matplotlib-3.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11011c97d62c1db7bc20509572557842dbb8c2a2ddd3dd7f20501aa1cde3e54e"}, + {file = "matplotlib-3.6.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c235bf9be052347373f589e018988cad177abb3f997ab1a2e2210c41562cc0c"}, + {file = "matplotlib-3.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bebcff4c3ed02c6399d47329f3554193abd824d3d53b5ca02cf583bcd94470e2"}, + {file = "matplotlib-3.6.3-cp311-cp311-win32.whl", hash = "sha256:d5f18430f5cfa5571ab8f4c72c89af52aa0618e864c60028f11a857d62200cba"}, + {file = "matplotlib-3.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:dfba7057609ca9567b9704626756f0142e97ec8c5ba2c70c6e7bd1c25ef99f06"}, + {file = "matplotlib-3.6.3-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:9fb8fb19d03abf3c5dab89a8677e62c4023632f919a62b6dd1d6d2dbf42cd9f5"}, + {file = "matplotlib-3.6.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:bbf269e1d24bc25247095d71c7a969813f7080e2a7c6fa28931a603f747ab012"}, + {file = "matplotlib-3.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:994637e2995b0342699b396a320698b07cd148bbcf2dd2fa2daba73f34dd19f2"}, + {file = "matplotlib-3.6.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77b384cee7ab8cf75ffccbfea351a09b97564fc62d149827a5e864bec81526e5"}, + {file = "matplotlib-3.6.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:73b93af33634ed919e72811c9703e1105185cd3fb46d76f30b7f4cfbbd063f89"}, + {file = "matplotlib-3.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:debeab8e2ab07e5e3dac33e12456da79c7e104270d2b2d1df92b9e40347cca75"}, + {file = "matplotlib-3.6.3-cp38-cp38-win32.whl", hash = "sha256:acc3b1a4bddbf56fe461e36fb9ef94c2cb607fc90d24ccc650040bfcc7610de4"}, + {file = "matplotlib-3.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:1183877d008c752d7d535396096c910f4663e4b74a18313adee1213328388e1e"}, + {file = "matplotlib-3.6.3-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:6adc441b5b2098a4b904bbf9d9e92fb816fef50c55aa2ea6a823fc89b94bb838"}, + {file = "matplotlib-3.6.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6d81b11ede69e3a751424b98dc869c96c10256b2206bfdf41f9c720eee86844c"}, + {file = "matplotlib-3.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:29f17b7f2e068dc346687cbdf80b430580bab42346625821c2d3abf3a1ec5417"}, + {file = "matplotlib-3.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f56a7252eee8f3438447f75f5e1148a1896a2756a92285fe5d73bed6deebff4"}, + {file = "matplotlib-3.6.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbddfeb1495484351fb5b30cf5bdf06b3de0bc4626a707d29e43dfd61af2a780"}, + {file = "matplotlib-3.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809119d1cba3ece3c9742eb01827fe7a0e781ea3c5d89534655a75e07979344f"}, + {file = "matplotlib-3.6.3-cp39-cp39-win32.whl", hash = "sha256:e0a64d7cc336b52e90f59e6d638ae847b966f68582a7af041e063d568e814740"}, + {file = "matplotlib-3.6.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:79e501eb847f4a489eb7065bb8d3187117f65a4c02d12ea3a19d6c5bef173bcc"}, + {file = "matplotlib-3.6.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2787a16df07370dcba385fe20cdd0cc3cfaabd3c873ddabca78c10514c799721"}, + {file = "matplotlib-3.6.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68d94a436f62b8a861bf3ace82067a71bafb724b4e4f9133521e4d8012420dd7"}, + {file = "matplotlib-3.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b409b2790cf8d7c1ef35920f01676d2ae7afa8241844e7aa5484fdf493a9a0"}, + {file = "matplotlib-3.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:faff486b36530a836a6b4395850322e74211cd81fc17f28b4904e1bd53668e3e"}, + {file = "matplotlib-3.6.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:38d38cb1ea1d80ee0f6351b65c6f76cad6060bbbead015720ba001348ae90f0c"}, + {file = "matplotlib-3.6.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f999661589981e74d793ee2f41b924b3b87d65fd929f6153bf0f30675c59b1"}, + {file = "matplotlib-3.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01b7f521a9a73c383825813af255f8c4485d1706e4f3e2ed5ae771e4403a40ab"}, + {file = "matplotlib-3.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9ceebaf73f1a3444fa11014f38b9da37ff7ea328d6efa1652241fe3777bfdab9"}, + {file = "matplotlib-3.6.3.tar.gz", hash = "sha256:1f4d69707b1677560cd952544ee4962f68ff07952fb9069ff8c12b56353cb8c9"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.0.1" +numpy = ">=1.19" +packaging = ">=20.0" +pillow = ">=6.2.0" +pyparsing = ">=2.2.1" +python-dateutil = ">=2.7" + +[[package]] +name = "mdit-py-plugins" +version = "0.3.3" +description = "Collection of plugins for markdown-it-py" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdit-py-plugins-0.3.3.tar.gz", hash = "sha256:5cfd7e7ac582a594e23ba6546a2f406e94e42eb33ae596d0734781261c251260"}, + {file = "mdit_py_plugins-0.3.3-py3-none-any.whl", hash = "sha256:36d08a29def19ec43acdcd8ba471d3ebab132e7879d442760d963f19913e04b9"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<3.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = 
"multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = 
"multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = 
"multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "numpy" +version = "1.24.1" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:179a7ef0889ab769cc03573b6217f54c8bd8e16cef80aad369e1e8185f994cd7"}, + {file = "numpy-1.24.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b09804ff570b907da323b3d762e74432fb07955701b17b08ff1b5ebaa8cfe6a9"}, + {file = "numpy-1.24.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b739841821968798947d3afcefd386fa56da0caf97722a5de53e07c4ccedc7"}, + {file = "numpy-1.24.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e3463e6ac25313462e04aea3fb8a0a30fb906d5d300f58b3bc2c23da6a15398"}, + {file = "numpy-1.24.1-cp310-cp310-win32.whl", hash = "sha256:b31da69ed0c18be8b77bfce48d234e55d040793cebb25398e2a7d84199fbc7e2"}, + {file = "numpy-1.24.1-cp310-cp310-win_amd64.whl", hash = "sha256:b07b40f5fb4fa034120a5796288f24c1fe0e0580bbfff99897ba6267af42def2"}, + {file = "numpy-1.24.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7094891dcf79ccc6bc2a1f30428fa5edb1e6fb955411ffff3401fb4ea93780a8"}, + {file = "numpy-1.24.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e418681372520c992805bb723e29d69d6b7aa411065f48216d8329d02ba032"}, + {file = "numpy-1.24.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e274f0f6c7efd0d577744f52032fdd24344f11c5ae668fe8d01aac0422611df1"}, + {file = "numpy-1.24.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0044f7d944ee882400890f9ae955220d29b33d809a038923d88e4e01d652acd9"}, + {file = "numpy-1.24.1-cp311-cp311-win32.whl", hash = "sha256:442feb5e5bada8408e8fcd43f3360b78683ff12a4444670a7d9e9824c1817d36"}, + {file = "numpy-1.24.1-cp311-cp311-win_amd64.whl", hash = "sha256:de92efa737875329b052982e37bd4371d52cabf469f83e7b8be9bb7752d67e51"}, + {file = "numpy-1.24.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b162ac10ca38850510caf8ea33f89edcb7b0bb0dfa5592d59909419986b72407"}, + {file = "numpy-1.24.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26089487086f2648944f17adaa1a97ca6aee57f513ba5f1c0b7ebdabbe2b9954"}, + {file = "numpy-1.24.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caf65a396c0d1f9809596be2e444e3bd4190d86d5c1ce21f5fc4be60a3bc5b36"}, + {file = "numpy-1.24.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0677a52f5d896e84414761531947c7a330d1adc07c3a4372262f25d84af7bf7"}, + {file = "numpy-1.24.1-cp38-cp38-win32.whl", hash = "sha256:dae46bed2cb79a58d6496ff6d8da1e3b95ba09afeca2e277628171ca99b99db1"}, + {file = "numpy-1.24.1-cp38-cp38-win_amd64.whl", hash = "sha256:6ec0c021cd9fe732e5bab6401adea5a409214ca5592cd92a114f7067febcba0c"}, + {file = "numpy-1.24.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28bc9750ae1f75264ee0f10561709b1462d450a4808cd97c013046073ae64ab6"}, + {file = "numpy-1.24.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:84e789a085aabef2f36c0515f45e459f02f570c4b4c4c108ac1179c34d475ed7"}, + {file = "numpy-1.24.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e669fbdcdd1e945691079c2cae335f3e3a56554e06bbd45d7609a6cf568c700"}, + {file = "numpy-1.24.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef85cf1f693c88c1fd229ccd1055570cb41cdf4875873b7728b6301f12cd05bf"}, + {file = "numpy-1.24.1-cp39-cp39-win32.whl", hash = "sha256:87a118968fba001b248aac90e502c0b13606721b1343cdaddbc6e552e8dfb56f"}, + {file = "numpy-1.24.1-cp39-cp39-win_amd64.whl", hash = "sha256:ddc7ab52b322eb1e40521eb422c4e0a20716c271a306860979d450decbb51b8e"}, + {file = "numpy-1.24.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ed5fb71d79e771ec930566fae9c02626b939e37271ec285e9efaf1b5d4370e7d"}, + {file = "numpy-1.24.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad2925567f43643f51255220424c23d204024ed428afc5aad0f86f3ffc080086"}, + {file = "numpy-1.24.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cfa1161c6ac8f92dea03d625c2d0c05e084668f4a06568b77a25a89111621566"}, + {file = "numpy-1.24.1.tar.gz", hash = "sha256:2386da9a471cc00a1f47845e27d916d5ec5346ae9696e01a8a34760858fe9dd2"}, +] + +[[package]] +name = "orjson" +version = "3.8.5" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "orjson-3.8.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:143639b9898b094883481fac37733231da1c2ae3aec78a1dd8d3b58c9c9fceef"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:31f43e63e0d94784c55e86bd376df3f80b574bea8c0bc5ecd8041009fa8ec78a"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c802ea6d4a0d40f096aceb5e7ef0a26c23d276cb9334e1cadcf256bb090b6426"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf298b55b371c2772420c5ace4d47b0a3ea1253667e20ded3c363160fd0575f6"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68cb4a8501a463771d55bb22fc72795ec7e21d71ab083e000a2c3b651b6fb2af"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4f1427952b3bd92bfb63a61b7ffc33a9f54ec6de296fa8d924cbeba089866acb"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0a9f329468c8eb000742455b83546849bcd69495d6baa6e171c7ee8600a47bd"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6535d527aa1e4a757a6ce9b61f3dd74edc762e7d2c6991643aae7c560c8440bd"}, + {file = "orjson-3.8.5-cp310-none-win_amd64.whl", hash = "sha256:2eee64c028adf6378dd714c8debc96d5b92b6bb4862debb65ca868e59bac6c63"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f5745ff473dd5c6718bf8c8d5bc183f638b4f3e03c7163ffcda4d4ef453f42ff"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:544f1240b295083697027a5093ec66763218ff16f03521d5020e7a436d2e417b"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85c9c6bab97a831e7741089057347d99901b4db2451a076ca8adedc7d96297f"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bae7347764e7be6dada980fd071e865544c98317ab61af575c9cc5e1dc7e3fe"}, + 
{file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f6f6e9d26a06b63126112a7bc8d8529df048d31df2a257a8484b76adf3e5d"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:758238364142fcbeca34c968beefc0875ffa10aa2f797c82f51cfb1d22d0934e"}, + {file = "orjson-3.8.5-cp311-none-win_amd64.whl", hash = "sha256:cc7579240fb88a626956a6cb4a181a11b62afbc409ce239a7b866568a2412fa2"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:79aa3e47cbbd4eedbbde4f988f766d6cf38ccb51d52cfabfeb6b8d1b58654d25"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:2544cd0d089faa862f5a39f508ee667419e3f9e11f119a6b1505cfce0eb26601"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2be0025ca7e460bcacb250aba8ce0239be62957d58cf34045834cc9302611d3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b57bf72902d818506906e49c677a791f90dbd7f0997d60b14bc6c1ce4ce4cf9"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ae9832a11c6a9efa8c14224e5caf6e35046efd781de14e59eb69ab4e561cf3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:0e28330cc6d51741cad0edd1b57caf6c5531aff30afe41402acde0a03246b8ed"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:155954d725627b5480e6cc1ca488afb4fa685099a4ace5f5bf21a182fabf6706"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ece1b6ef9312df5d5274ca6786e613b7da7de816356e36bcad9ea8a73d15ab71"}, + {file = "orjson-3.8.5-cp37-none-win_amd64.whl", hash = "sha256:6f58d1f0702332496bc1e2d267c7326c851991b62cf6395370d59c47f9890007"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:933f4ab98362f46a59a6d0535986e1f0cae2f6b42435e24a55922b4bc872af0c"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:47a7ca236b25a138a74b2cb5169adcdc5b2b8abdf661de438ba65967a2cde9dc"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b573ca942c626fcf8a86be4f180b86b2498b18ae180f37b4180c2aced5808710"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9bab11611d5452efe4ae5315f5eb806f66104c08a089fb84c648d2e8e00f106"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee2f5f6476617d01ca166266d70fd5605d3397a41f067022ce04a2e1ced4c8d"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:ec0b0b6cd0b84f03537f22b719aca705b876c54ab5cf3471d551c9644127284f"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:df3287dc304c8c4556dc85c4ab89eb333307759c1863f95e72e555c0cfce3e01"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09f40add3c2d208e20f8bf185df38f992bf5092202d2d30eced8f6959963f1d5"}, + {file = "orjson-3.8.5-cp38-none-win_amd64.whl", hash = "sha256:232ec1df0d708f74e0dd1fccac1e9a7008cd120d48fe695e8f0c9d80771da430"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:8fba3e7aede3e88a01e94e6fe63d4580162b212e6da27ae85af50a1787e41416"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = 
"sha256:85e22c358cab170c8604e9edfffcc45dd7b0027ce57ed6bcacb556e8bfbbb704"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeab1d8247507a75926adf3ca995c74e91f5db1f168815bf3e774f992ba52b50"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daaaef15a41e9e8cadc7677cefe00065ae10bce914eefe8da1cd26b3d063970b"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ccc9f52cf46bd353c6ae1153eaf9d18257ddc110d135198b0cd8718474685ce"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d48c182c7ff4ea0787806de8a2f9298ca44fd0068ecd5f23a4b2d8e03c745cb6"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1848e3b4cc09cc82a67262ae56e2a772b0548bb5a6f9dcaee10dcaaf0a5177b7"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38480031bc8add58effe802291e4abf7042ef72ae1a4302efe9a36c8f8bfbfcc"}, + {file = "orjson-3.8.5-cp39-none-win_amd64.whl", hash = "sha256:0e9a1c2e649cbaed410c882cedc8f3b993d8f1426d9327f31762d3f46fe7cc88"}, + {file = "orjson-3.8.5.tar.gz", hash = "sha256:77a3b2bd0c4ef7723ea09081e3329dac568a62463aed127c1501441b07ffc64b"}, +] + +[[package]] +name = "packaging" +version = "23.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] + +[[package]] +name = "pandas" +version = "1.5.3" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, + {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, +] + +[package.dependencies] +numpy = {version = ">=1.20.3", markers = "python_version < \"3.10\""} +python-dateutil = ">=2.8.1" +pytz = ">=2020.1" + +[package.extras] +test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] + +[[package]] +name = "pillow" +version = "9.4.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, + {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, + {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, + {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, + {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, + {file = 
"Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, + {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, + {file = "Pillow-9.4.0-2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0"}, + {file = "Pillow-9.4.0-2-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f"}, + {file = "Pillow-9.4.0-2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c"}, + {file = "Pillow-9.4.0-2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848"}, + {file = "Pillow-9.4.0-2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1"}, + {file = "Pillow-9.4.0-2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33"}, + {file = "Pillow-9.4.0-2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, + {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, + {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, + {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, + {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, + {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, + {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, + {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, + {file = 
"Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, + {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, + {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, + {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, + {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, + {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "pycryptodome" +version = "3.17" +description = "Cryptographic library for Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycryptodome-3.17-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:2c5631204ebcc7ae33d11c43037b2dafe25e2ab9c1de6448eb6502ac69c19a56"}, + {file = "pycryptodome-3.17-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:04779cc588ad8f13c80a060b0b1c9d1c203d051d8a43879117fe6b8aaf1cd3fa"}, + {file = "pycryptodome-3.17-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f812d58c5af06d939b2baccdda614a3ffd80531a26e5faca2c9f8b1770b2b7af"}, + {file = "pycryptodome-3.17-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:9453b4e21e752df8737fdffac619e93c9f0ec55ead9a45df782055eb95ef37d9"}, + {file = "pycryptodome-3.17-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:121d61663267f73692e8bde5ec0d23c9146465a0d75cad75c34f75c752527b01"}, + {file = "pycryptodome-3.17-cp27-cp27m-win32.whl", hash = "sha256:ba2d4fcb844c6ba5df4bbfee9352ad5352c5ae939ac450e06cdceff653280450"}, + {file = "pycryptodome-3.17-cp27-cp27m-win_amd64.whl", hash = "sha256:87e2ca3aa557781447428c4b6c8c937f10ff215202ab40ece5c13a82555c10d6"}, + {file = "pycryptodome-3.17-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f44c0d28716d950135ff21505f2c764498eda9d8806b7c78764165848aa419bc"}, + {file = "pycryptodome-3.17-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5a790bc045003d89d42e3b9cb3cc938c8561a57a88aaa5691512e8540d1ae79c"}, + {file = "pycryptodome-3.17-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:d086d46774e27b280e4cece8ab3d87299cf0d39063f00f1e9290d096adc5662a"}, + {file = "pycryptodome-3.17-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:5587803d5b66dfd99e7caa31ed91fba0fdee3661c5d93684028ad6653fce725f"}, + {file = "pycryptodome-3.17-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:e7debd9c439e7b84f53be3cf4ba8b75b3d0b6e6015212355d6daf44ac672e210"}, + {file = "pycryptodome-3.17-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ca1ceb6303be1282148f04ac21cebeebdb4152590842159877778f9cf1634f09"}, + {file = "pycryptodome-3.17-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:dc22cc00f804485a3c2a7e2010d9f14a705555f67020eb083e833cabd5bd82e4"}, + {file = "pycryptodome-3.17-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80ea8333b6a5f2d9e856ff2293dba2e3e661197f90bf0f4d5a82a0a6bc83a626"}, + {file = "pycryptodome-3.17-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c133f6721fba313722a018392a91e3c69d3706ae723484841752559e71d69dc6"}, + {file = "pycryptodome-3.17-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:333306eaea01fde50a73c4619e25631e56c4c61bd0fb0a2346479e67e3d3a820"}, + {file = "pycryptodome-3.17-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:1a30f51b990994491cec2d7d237924e5b6bd0d445da9337d77de384ad7f254f9"}, + {file = "pycryptodome-3.17-cp35-abi3-musllinux_1_1_x86_64.whl", 
hash = "sha256:909e36a43fe4a8a3163e9c7fc103867825d14a2ecb852a63d3905250b308a4e5"}, + {file = "pycryptodome-3.17-cp35-abi3-win32.whl", hash = "sha256:a3228728a3808bc9f18c1797ec1179a0efb5068c817b2ffcf6bcd012494dffb2"}, + {file = "pycryptodome-3.17-cp35-abi3-win_amd64.whl", hash = "sha256:9ec565e89a6b400eca814f28d78a9ef3f15aea1df74d95b28b7720739b28f37f"}, + {file = "pycryptodome-3.17-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:e1819b67bcf6ca48341e9b03c2e45b1c891fa8eb1a8458482d14c2805c9616f2"}, + {file = "pycryptodome-3.17-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:f8e550caf52472ae9126953415e4fc554ab53049a5691c45b8816895c632e4d7"}, + {file = "pycryptodome-3.17-pp27-pypy_73-win32.whl", hash = "sha256:afbcdb0eda20a0e1d44e3a1ad6d4ec3c959210f4b48cabc0e387a282f4c7deb8"}, + {file = "pycryptodome-3.17-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a74f45aee8c5cc4d533e585e0e596e9f78521e1543a302870a27b0ae2106381e"}, + {file = "pycryptodome-3.17-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38bbd6717eac084408b4094174c0805bdbaba1f57fc250fd0309ae5ec9ed7e09"}, + {file = "pycryptodome-3.17-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f68d6c8ea2974a571cacb7014dbaada21063a0375318d88ac1f9300bc81e93c3"}, + {file = "pycryptodome-3.17-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8198f2b04c39d817b206ebe0db25a6653bb5f463c2319d6f6d9a80d012ac1e37"}, + {file = "pycryptodome-3.17-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a232474cd89d3f51e4295abe248a8b95d0332d153bf46444e415409070aae1e"}, + {file = "pycryptodome-3.17-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4992ec965606054e8326e83db1c8654f0549cdb26fce1898dc1a20bc7684ec1c"}, + {file = "pycryptodome-3.17-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53068e33c74f3b93a8158dacaa5d0f82d254a81b1002e0cd342be89fcb3433eb"}, + {file = "pycryptodome-3.17-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:74794a2e2896cd0cf56fdc9db61ef755fa812b4a4900fa46c49045663a92b8d0"}, + {file = "pycryptodome-3.17.tar.gz", hash = "sha256:bce2e2d8e82fcf972005652371a3e8731956a0c1fbb719cc897943b3695ad91b"}, +] + +[[package]] +name = "pydantic" +version = "1.10.4" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5635de53e6686fe7a44b5cf25fcc419a0d5e5c1a1efe73d49d48fe7586db854"}, + {file = "pydantic-1.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6dc1cc241440ed7ca9ab59d9929075445da6b7c94ced281b3dd4cfe6c8cff817"}, + {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bdeb10d2db0f288e71d49c9cefa609bca271720ecd0c58009bd7504a0c464c"}, + {file = "pydantic-1.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78cec42b95dbb500a1f7120bdf95c401f6abb616bbe8785ef09887306792e66e"}, + {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8775d4ef5e7299a2f4699501077a0defdaac5b6c4321173bcb0f3c496fbadf85"}, + {file = "pydantic-1.10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:572066051eeac73d23f95ba9a71349c42a3e05999d0ee1572b7860235b850cc6"}, + {file = "pydantic-1.10.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:7feb6a2d401f4d6863050f58325b8d99c1e56f4512d98b11ac64ad1751dc647d"}, + {file = "pydantic-1.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39f4a73e5342b25c2959529f07f026ef58147249f9b7431e1ba8414a36761f53"}, + {file = "pydantic-1.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:983e720704431a6573d626b00662eb78a07148c9115129f9b4351091ec95ecc3"}, + {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75d52162fe6b2b55964fbb0af2ee58e99791a3138588c482572bb6087953113a"}, + {file = "pydantic-1.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdf8d759ef326962b4678d89e275ffc55b7ce59d917d9f72233762061fd04a2d"}, + {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05a81b006be15655b2a1bae5faa4280cf7c81d0e09fcb49b342ebf826abe5a72"}, + {file = "pydantic-1.10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d88c4c0e5c5dfd05092a4b271282ef0588e5f4aaf345778056fc5259ba098857"}, + {file = "pydantic-1.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:6a05a9db1ef5be0fe63e988f9617ca2551013f55000289c671f71ec16f4985e3"}, + {file = "pydantic-1.10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:887ca463c3bc47103c123bc06919c86720e80e1214aab79e9b779cda0ff92a00"}, + {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdf88ab63c3ee282c76d652fc86518aacb737ff35796023fae56a65ced1a5978"}, + {file = "pydantic-1.10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a48f1953c4a1d9bd0b5167ac50da9a79f6072c63c4cef4cf2a3736994903583e"}, + {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a9f2de23bec87ff306aef658384b02aa7c32389766af3c5dee9ce33e80222dfa"}, + {file = "pydantic-1.10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cd8702c5142afda03dc2b1ee6bc358b62b3735b2cce53fc77b31ca9f728e4bc8"}, + {file = "pydantic-1.10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6e7124d6855b2780611d9f5e1e145e86667eaa3bd9459192c8dc1a097f5e9903"}, + {file = "pydantic-1.10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b53e1d41e97063d51a02821b80538053ee4608b9a181c1005441f1673c55423"}, + {file = "pydantic-1.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:55b1625899acd33229c4352ce0ae54038529b412bd51c4915349b49ca575258f"}, + {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:301d626a59edbe5dfb48fcae245896379a450d04baeed50ef40d8199f2733b06"}, + {file = "pydantic-1.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f9d649892a6f54a39ed56b8dfd5e08b5f3be5f893da430bed76975f3735d15"}, + {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d7b5a3821225f5c43496c324b0d6875fde910a1c2933d726a743ce328fbb2a8c"}, + {file = "pydantic-1.10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f2f7eb6273dd12472d7f218e1fef6f7c7c2f00ac2e1ecde4db8824c457300416"}, + {file = "pydantic-1.10.4-cp38-cp38-win_amd64.whl", hash = "sha256:4b05697738e7d2040696b0a66d9f0a10bec0efa1883ca75ee9e55baf511909d6"}, + {file = "pydantic-1.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a9a6747cac06c2beb466064dda999a13176b23535e4c496c9d48e6406f92d42d"}, + {file = "pydantic-1.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb992a1ef739cc7b543576337bebfc62c0e6567434e522e97291b251a41dad7f"}, + {file = 
"pydantic-1.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990406d226dea0e8f25f643b370224771878142155b879784ce89f633541a024"}, + {file = "pydantic-1.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e82a6d37a95e0b1b42b82ab340ada3963aea1317fd7f888bb6b9dfbf4fff57c"}, + {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9193d4f4ee8feca58bc56c8306bcb820f5c7905fd919e0750acdeeeef0615b28"}, + {file = "pydantic-1.10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b3ce5f16deb45c472dde1a0ee05619298c864a20cded09c4edd820e1454129f"}, + {file = "pydantic-1.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:9cbdc268a62d9a98c56e2452d6c41c0263d64a2009aac69246486f01b4f594c4"}, + {file = "pydantic-1.10.4-py3-none-any.whl", hash = "sha256:4948f264678c703f3877d1c8877c4e3b2e12e549c57795107f08cf70c6ec7774"}, + {file = "pydantic-1.10.4.tar.gz", hash = "sha256:b9a3859f24eb4e097502a3be1fb4b2abb79b6103dd9e2e0edb70613a4459a648"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pydub" +version = "0.25.1" +description = "Manipulate audio with an simple and easy high level interface" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"}, + {file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"}, +] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrsistent" +version = "0.19.3" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = 
"pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, +] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = 
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-multipart" +version = "0.0.5" +description = "A streaming multipart parser for Python" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "python-multipart-0.0.5.tar.gz", hash = "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43"}, +] + +[package.dependencies] +six = ">=1.4.0" + +[[package]] +name = "pytz" +version = "2022.7.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, + {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, +] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = 
"PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + 
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + +[[package]] +name = "requests" +version = "2.28.2" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "starlette" +version = "0.22.0" +description = "The little ASGI library that shines." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "starlette-0.22.0-py3-none-any.whl", hash = "sha256:b5eda991ad5f0ee5d8ce4c4540202a573bb6691ecd0c712262d0bc85cf8f2c50"}, + {file = "starlette-0.22.0.tar.gz", hash = "sha256:b092cbc365bea34dd6840b42861bdabb2f507f8671e642e8272d2442e08ea4ff"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] + +[[package]] +name = "toolz" +version = "0.12.0" +description = "List processing tools and functional utilities" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, + {file = "toolz-0.12.0.tar.gz", hash = "sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, +] + +[[package]] +name = "tqdm" +version = "4.64.1" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = "sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"}, + {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.4.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, + {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, +] + +[[package]] +name = "uc-micro-py" +version = "1.0.1" +description = "Micro subset of unicode data files for linkify-it-py projects." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "uc-micro-py-1.0.1.tar.gz", hash = "sha256:b7cdf4ea79433043ddfe2c82210208f26f7962c0cfbe3bacb05ee879a7fdb596"}, + {file = "uc_micro_py-1.0.1-py3-none-any.whl", hash = "sha256:316cfb8b6862a0f1d03540f0ae6e7b033ff1fa0ddbe60c12cbe0d4cec846a69f"}, +] + +[package.extras] +test = ["coverage", "pytest", "pytest-cov"] + +[[package]] +name = "urllib3" +version = "1.26.14" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "uvicorn" +version = "0.20.0" +description = "The lightning-fast ASGI server." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.20.0-py3-none-any.whl", hash = "sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd"}, + {file = "uvicorn-0.20.0.tar.gz", hash = "sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "websockets" +version = "10.4" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "websockets-10.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d58804e996d7d2307173d56c297cf7bc132c52df27a3efaac5e8d43e36c21c48"}, + {file = "websockets-10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc0b82d728fe21a0d03e65f81980abbbcb13b5387f733a1a870672c5be26edab"}, + {file = "websockets-10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba089c499e1f4155d2a3c2a05d2878a3428cf321c848f2b5a45ce55f0d7d310c"}, + {file = "websockets-10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33d69ca7612f0ddff3316b0c7b33ca180d464ecac2d115805c044bf0a3b0d032"}, + {file = "websockets-10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62e627f6b6d4aed919a2052efc408da7a545c606268d5ab5bfab4432734b82b4"}, + {file = "websockets-10.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ea7b82bfcae927eeffc55d2ffa31665dc7fec7b8dc654506b8e5a518eb4d50"}, + {file = "websockets-10.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e0cb5cc6ece6ffa75baccfd5c02cffe776f3f5c8bf486811f9d3ea3453676ce8"}, + {file = "websockets-10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae5e95cfb53ab1da62185e23b3130e11d64431179debac6dc3c6acf08760e9b1"}, + {file = "websockets-10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7c584f366f46ba667cfa66020344886cf47088e79c9b9d39c84ce9ea98aaa331"}, + {file = "websockets-10.4-cp310-cp310-win32.whl", hash = "sha256:b029fb2032ae4724d8ae8d4f6b363f2cc39e4c7b12454df8df7f0f563ed3e61a"}, + {file = "websockets-10.4-cp310-cp310-win_amd64.whl", hash = "sha256:8dc96f64ae43dde92530775e9cb169979f414dcf5cff670455d81a6823b42089"}, + {file = "websockets-10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47a2964021f2110116cc1125b3e6d87ab5ad16dea161949e7244ec583b905bb4"}, + {file = "websockets-10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:e789376b52c295c4946403bd0efecf27ab98f05319df4583d3c48e43c7342c2f"}, + {file = "websockets-10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d3f0b61c45c3fa9a349cf484962c559a8a1d80dae6977276df8fd1fa5e3cb8c"}, + {file = "websockets-10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55b5905705725af31ccef50e55391621532cd64fbf0bc6f4bac935f0fccec46"}, + {file = "websockets-10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00c870522cdb69cd625b93f002961ffb0c095394f06ba8c48f17eef7c1541f96"}, + {file = "websockets-10.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f38706e0b15d3c20ef6259fd4bc1700cd133b06c3c1bb108ffe3f8947be15fa"}, + {file = "websockets-10.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f2c38d588887a609191d30e902df2a32711f708abfd85d318ca9b367258cfd0c"}, + {file = "websockets-10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe10ddc59b304cb19a1bdf5bd0a7719cbbc9fbdd57ac80ed436b709fcf889106"}, + {file = "websockets-10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:90fcf8929836d4a0e964d799a58823547df5a5e9afa83081761630553be731f9"}, + {file = "websockets-10.4-cp311-cp311-win32.whl", hash = "sha256:b9968694c5f467bf67ef97ae7ad4d56d14be2751000c1207d31bf3bb8860bae8"}, + {file = "websockets-10.4-cp311-cp311-win_amd64.whl", hash = "sha256:a7a240d7a74bf8d5cb3bfe6be7f21697a28ec4b1a437607bae08ac7acf5b4882"}, + {file = "websockets-10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74de2b894b47f1d21cbd0b37a5e2b2392ad95d17ae983e64727e18eb281fe7cb"}, + {file = "websockets-10.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3a686ecb4aa0d64ae60c9c9f1a7d5d46cab9bfb5d91a2d303d00e2cd4c4c5cc"}, + {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d15c968ea7a65211e084f523151dbf8ae44634de03c801b8bd070b74e85033"}, + {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00213676a2e46b6ebf6045bc11d0f529d9120baa6f58d122b4021ad92adabd41"}, + {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e23173580d740bf8822fd0379e4bf30aa1d5a92a4f252d34e893070c081050df"}, + {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:dd500e0a5e11969cdd3320935ca2ff1e936f2358f9c2e61f100a1660933320ea"}, + {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4239b6027e3d66a89446908ff3027d2737afc1a375f8fd3eea630a4842ec9a0c"}, + {file = "websockets-10.4-cp37-cp37m-win32.whl", hash = "sha256:8a5cc00546e0a701da4639aa0bbcb0ae2bb678c87f46da01ac2d789e1f2d2038"}, + {file = "websockets-10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a9f9a735deaf9a0cadc2d8c50d1a5bcdbae8b6e539c6e08237bc4082d7c13f28"}, + {file = "websockets-10.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c1289596042fad2cdceb05e1ebf7aadf9995c928e0da2b7a4e99494953b1b94"}, + {file = "websockets-10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0cff816f51fb33c26d6e2b16b5c7d48eaa31dae5488ace6aae468b361f422b63"}, + {file = "websockets-10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dd9becd5fe29773d140d68d607d66a38f60e31b86df75332703757ee645b6faf"}, + {file = "websockets-10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:45ec8e75b7dbc9539cbfafa570742fe4f676eb8b0d3694b67dabe2f2ceed8aa6"}, + {file = "websockets-10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f72e5cd0f18f262f5da20efa9e241699e0cf3a766317a17392550c9ad7b37d8"}, + {file = "websockets-10.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185929b4808b36a79c65b7865783b87b6841e852ef5407a2fb0c03381092fa3b"}, + {file = "websockets-10.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d27a7e34c313b3a7f91adcd05134315002aaf8540d7b4f90336beafaea6217c"}, + {file = "websockets-10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:884be66c76a444c59f801ac13f40c76f176f1bfa815ef5b8ed44321e74f1600b"}, + {file = "websockets-10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:931c039af54fc195fe6ad536fde4b0de04da9d5916e78e55405436348cfb0e56"}, + {file = "websockets-10.4-cp38-cp38-win32.whl", hash = "sha256:db3c336f9eda2532ec0fd8ea49fef7a8df8f6c804cdf4f39e5c5c0d4a4ad9a7a"}, + {file = "websockets-10.4-cp38-cp38-win_amd64.whl", hash = "sha256:48c08473563323f9c9debac781ecf66f94ad5a3680a38fe84dee5388cf5acaf6"}, + {file = "websockets-10.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:40e826de3085721dabc7cf9bfd41682dadc02286d8cf149b3ad05bff89311e4f"}, + {file = "websockets-10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56029457f219ade1f2fc12a6504ea61e14ee227a815531f9738e41203a429112"}, + {file = "websockets-10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5fc088b7a32f244c519a048c170f14cf2251b849ef0e20cbbb0fdf0fdaf556f"}, + {file = "websockets-10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc8709c00704194213d45e455adc106ff9e87658297f72d544220e32029cd3d"}, + {file = "websockets-10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0154f7691e4fe6c2b2bc275b5701e8b158dae92a1ab229e2b940efe11905dff4"}, + {file = "websockets-10.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c6d2264f485f0b53adf22697ac11e261ce84805c232ed5dbe6b1bcb84b00ff0"}, + {file = "websockets-10.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9bc42e8402dc5e9905fb8b9649f57efcb2056693b7e88faa8fb029256ba9c68c"}, + {file = "websockets-10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:edc344de4dac1d89300a053ac973299e82d3db56330f3494905643bb68801269"}, + {file = "websockets-10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:84bc2a7d075f32f6ed98652db3a680a17a4edb21ca7f80fe42e38753a58ee02b"}, + {file = "websockets-10.4-cp39-cp39-win32.whl", hash = "sha256:c94ae4faf2d09f7c81847c63843f84fe47bf6253c9d60b20f25edfd30fb12588"}, + {file = "websockets-10.4-cp39-cp39-win_amd64.whl", hash = "sha256:bbccd847aa0c3a69b5f691a84d2341a4f8a629c6922558f2a70611305f902d74"}, + {file = "websockets-10.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:82ff5e1cae4e855147fd57a2863376ed7454134c2bf49ec604dfe71e446e2193"}, + {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d210abe51b5da0ffdbf7b43eed0cfdff8a55a1ab17abbec4301c9ff077dd0342"}, + {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:942de28af58f352a6f588bc72490ae0f4ccd6dfc2bd3de5945b882a078e4e179"}, + {file = 
"websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b27d6c1c6cd53dc93614967e9ce00ae7f864a2d9f99fe5ed86706e1ecbf485"}, + {file = "websockets-10.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3d3cac3e32b2c8414f4f87c1b2ab686fa6284a980ba283617404377cd448f631"}, + {file = "websockets-10.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:da39dd03d130162deb63da51f6e66ed73032ae62e74aaccc4236e30edccddbb0"}, + {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389f8dbb5c489e305fb113ca1b6bdcdaa130923f77485db5b189de343a179393"}, + {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09a1814bb15eff7069e51fed0826df0bc0702652b5cb8f87697d469d79c23576"}, + {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff64a1d38d156d429404aaa84b27305e957fd10c30e5880d1765c9480bea490f"}, + {file = "websockets-10.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b343f521b047493dc4022dd338fc6db9d9282658862756b4f6fd0e996c1380e1"}, + {file = "websockets-10.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:932af322458da7e4e35df32f050389e13d3d96b09d274b22a7aa1808f292fee4"}, + {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a4162139374a49eb18ef5b2f4da1dd95c994588f5033d64e0bbfda4b6b6fcf"}, + {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c57e4c1349fbe0e446c9fa7b19ed2f8a4417233b6984277cce392819123142d3"}, + {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b627c266f295de9dea86bd1112ed3d5fafb69a348af30a2422e16590a8ecba13"}, + {file = "websockets-10.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:05a7233089f8bd355e8cbe127c2e8ca0b4ea55467861906b80d2ebc7db4d6b72"}, + {file = "websockets-10.4.tar.gz", hash = "sha256:eef610b23933c54d5d921c92578ae5f89813438fded840c2e9809d378dc765d3"}, +] + +[[package]] +name = "yarl" +version = "1.8.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, + {file = 
"yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, + {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, + {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, + {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, + {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, + {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, + {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, + {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, + {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, + {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, + {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, + {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, + {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = "~3.9.15" +content-hash = "3cfe0b9594309e180b37d2405ae2f51be4d0ee50b56f874152bc914121724c29" diff --git a/front/admin_ui/pyproject.toml b/front/admin_ui/pyproject.toml new file mode 100644 index 00000000..807e15fd --- /dev/null +++ b/front/admin_ui/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +name = "admin-ui" +version = "0.1.0" +description = "Admin interface for datasets-server" +authors = ["Quentin Lhoest <[email protected]>"] + 
+[tool.poetry.dependencies] +gradio = "~3.16.1" +matplotlib = "^3.3.4" +requests = "^2.26.0" +python = "~3.9.15" +huggingface-hub = "^0.12.0" +duckdb = "^0.6.1" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/front/admin_ui/requirements.txt b/front/admin_ui/requirements.txt new file mode 100644 index 00000000..e1e0d9a9 --- /dev/null +++ b/front/admin_ui/requirements.txt @@ -0,0 +1,5 @@ +gradio==~3.16.1 +matplotlib==^3.3.4 +requests==^2.26.0 +huggingface-hub==~0.12.0 +duckdb==~0.6.1
042e8a8207803d74260f8ef0784bce40d4e397a4
Rémy
2023-01-31T12:28:14
fix: remove mongo migration job execution on pre-install hook
diff --git a/chart/templates/jobs/mongodb-migration/job.yaml b/chart/templates/jobs/mongodb-migration/job.yaml index 4c23212c..68ea3bd5 100644 --- a/chart/templates/jobs/mongodb-migration/job.yaml +++ b/chart/templates/jobs/mongodb-migration/job.yaml @@ -12 +12 @@ metadata: - "helm.sh/hook": pre-install,pre-upgrade + "helm.sh/hook": pre-upgrade
eaa2c47807a0d6185dd8e3b8b65e566e1abfbd48
Sylvain Lesage
2023-01-31T12:23:36
feat: 🎸 add indexes, based on recommendations from mongo cloud (#728)
diff --git a/libs/libcommon/src/libcommon/queue.py b/libs/libcommon/src/libcommon/queue.py index 7a8de583..cf808f5b 100644 --- a/libs/libcommon/src/libcommon/queue.py +++ b/libs/libcommon/src/libcommon/queue.py @@ -131,0 +132 @@ class Job(Document): + "dataset", @@ -136 +137 @@ class Job(Document): - ("status", "type", "created_at", "namespace", "unicity_id", "priority"), + ("priority", "status", "type", "created_at", "namespace", "unicity_id"),
6bd97091b3e5d96c7424f71169891aa3e0f5a526
Sylvain Lesage
2023-01-31T09:31:59
feat: 🎸 adapt number of replicas to flush the queues (#733)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index c9e69196..d1dd2901 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -259 +259 @@ splits: - replicas: 12 + replicas: 8 @@ -279 +279 @@ firstRows: - replicas: 24 + replicas: 90 @@ -301 +301 @@ parquetAndDatasetInfo: - replicas: 24 + replicas: 30 @@ -318 +318 @@ parquet: - replicas: 4 + replicas: 2 @@ -335 +335 @@ datasetInfo: - replicas: 4 + replicas: 2 @@ -352 +352 @@ sizes: - replicas: 4 + replicas: 2
668f806336b07a8a9a3d5f66382f44b98f2afd64
Sylvain Lesage
2023-01-30T18:23:54
fix: 🐛 fix two labels (#730)
diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index c26333c5..0a7e2486 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -131 +131 @@ app.kubernetes.io/component: "{{ include "name" . }}-api" -app: "{{ include "release" . }}-worker-config-names" +app.kubernetes.io/component: "{{ include "name" . }}-worker-config-names" @@ -136 +136 @@ app: "{{ include "release" . }}-worker-config-names" -app: "{{ include "release" . }}-worker-split-names" +app.kubernetes.io/component: "{{ include "name" . }}-worker-split-names"
3ac5a221c796e71821ef84d8ef973a0b677cfdb0
Rémy
2023-01-30T16:44:49
feat: publish helm chart on HF internal registry
diff --git a/.github/workflows/publish-helm.yml b/.github/workflows/publish-helm.yml new file mode 100644 index 00000000..dc643bf1 --- /dev/null +++ b/.github/workflows/publish-helm.yml @@ -0,0 +1,40 @@ +name: Publish Helm Chart + +on: + workflow_dispatch: + push: + branches: + - main + paths: + - 'chart/**' + +jobs: + publish: + name: Publish on registry + runs-on: ubuntu-latest + env: + HELM_REPO_USERNAME: ${{ secrets.REGISTRY_USERNAME }} + HELM_REPO_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Install Helm + run: | + curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash + helm plugin install https://github.com/chartmuseum/helm-push + + - name: Tailscale + uses: tailscale/github-action@v1 + with: + authkey: ${{ secrets.TAILSCALE_AUTHKEY }} + + - name: Add repository + run: | + helm repo add charts ${{ secrets.REGISTRY_URL }}/chartrepo/charts + + - name: Publish + run: | + cd chart + helm dependencies update + helm cm-push . charts
6f1dc39c2bf4313f3ec6252c03e1cae9a4d87398
Rémy
2023-01-30T15:03:38
feat: private hub refactoring
diff --git a/.gitignore b/.gitignore index 3b11076c..f2db052a 100644 --- a/.gitignore +++ b/.gitignore @@ -11,0 +12 @@ +.idea diff --git a/chart/Chart.lock b/chart/Chart.lock index 923e42fe..1273ec41 100644 --- a/chart/Chart.lock +++ b/chart/Chart.lock @@ -4,3 +4,3 @@ dependencies: - version: 12.0.0 -digest: sha256:13b02639b09eca43c731faaa39c0370ac804ad840e501b883deb40b910ea8115 -generated: "2022-05-05T09:50:37.216564475Z" + version: 13.6.4 +digest: sha256:d9d99bcc06040ce2432a014455faf0e7b3b27c20d3a80bbe50b4e9259351e1ac +generated: "2023-01-20T11:34:50.070944+01:00" diff --git a/chart/Chart.yaml b/chart/Chart.yaml index ff3ee87e..2b4e2f76 100644 --- a/chart/Chart.yaml +++ b/chart/Chart.yaml @@ -23 +23 @@ type: application -version: 0.1.0 +version: 1.0.0 @@ -35 +35 @@ dependencies: - version: 12.0.0 + version: 13.6.4 diff --git a/chart/Makefile b/chart/Makefile index f0d8b43f..236f370f 100644 --- a/chart/Makefile +++ b/chart/Makefile @@ -1 +0,0 @@ -CHART_NAME := datasets-server @@ -10 +9 @@ uninstall: - helm uninstall $(CHART_NAME)-$(ENV) -n $(K8S_NAMESPACE) + helm uninstall $(ENV) -n $(K8S_NAMESPACE) @@ -14 +13 @@ diff: - helm diff upgrade --install $(CHART_NAME)-$(ENV) . --values docker-images.yaml --values env/$(ENV).yaml -n $(K8S_NAMESPACE) + helm diff upgrade --install $(ENV) . --values env/$(ENV).yaml -n $(K8S_NAMESPACE) @@ -18 +17 @@ upgrade: - helm upgrade --install $(CHART_NAME)-$(ENV) . --values docker-images.yaml --values env/$(ENV).yaml -n $(K8S_NAMESPACE) + helm upgrade --install $(ENV) . --values env/$(ENV).yaml -n $(K8S_NAMESPACE) diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index be8a9de6..e69de29b 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -1,15 +0,0 @@ -{ - "dockerImage": { - "reverseProxy": "docker.io/nginx:1.20", - "jobs": { - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-2f38593" - }, - "services": { - "admin": "huggingface/datasets-server-services-admin:sha-2f38593", - "api": "huggingface/datasets-server-services-api:sha-2f38593" - }, - "workers": { - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-2f38593" - } - } -} diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index bdec9ba3..c9e69196 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -43,3 +43,31 @@ -# --- common parameters --- - -hostname: "datasets-server.huggingface.co" +images: + pullPolicy: IfNotPresent + pullSecrets: [] + reverseProxy: + useGlobalRegistry: false + registry: docker.io + repository: nginx + tag: "1.20" + jobs: + mongodbMigration: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-jobs-mongodb_migration + tag: sha-2f38593 + services: + admin: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-services-admin + tag: sha-2f38593 + api: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-services-api + tag: sha-2f38593 + workers: + datasetsBased: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-workers-datasets_based + tag: sha-2f38593 @@ -71,0 +100,2 @@ common: + # URL of the HuggingFace Hub + hfEndpoint: "https://huggingface.co" @@ -79,0 +110,5 @@ mongodbMigration: + resources: + requests: + cpu: 1 + limits: + cpu: 1 @@ -108,0 +144 @@ reverseProxy: + type: NodePort @@ -149,0 +186,2 @@ admin: + service: + type: NodePort @@ -166,0 +205,2 @@ api: + service: + type: NodePort diff --git a/chart/templates/_envCache.tpl b/chart/templates/_envCache.tpl index 
4b539a53..4701fbfd 100644 --- a/chart/templates/_envCache.tpl +++ b/chart/templates/_envCache.tpl @@ -8,13 +8 @@ - {{- if .Values.secrets.mongoUrl.fromSecret }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl.secretName | quote }} - key: MONGO_URL - optional: false - {{- else }} - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - value: {{ .Values.secrets.mongoUrl.value }} - {{- end }} - {{- end }} + {{ include "datasetServer.mongo.url" . | nindent 2 }} diff --git a/chart/templates/_envCommon.tpl b/chart/templates/_envCommon.tpl index 87f2b807..50263629 100644 --- a/chart/templates/_envCommon.tpl +++ b/chart/templates/_envCommon.tpl @@ -6 +6 @@ - value: {{ .Values.common.hfEndpoint | quote }} + value: {{ include "datasetsServer.hub.url" . }} @@ -8 +8 @@ - value: {{ .Values.common.hfEndpoint | quote }} + value: {{ include "datasetsServer.hub.url" . }} @@ -25,0 +26,16 @@ + +{{- define "datasetServer.mongo.url" -}} +{{- if .Values.secrets.mongoUrl.fromSecret }} +valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl.secretName | quote }} + key: MONGO_URL + optional: false +{{- else }} + {{- if .Values.mongodb.enabled }} +value: mongodb://{{.Release.Name}}-datasets-server-mongodb + {{- else }} +value: {{ .Values.secrets.mongoUrl.value }} + {{- end }} +{{- end }} +{{- end -}} \ No newline at end of file diff --git a/chart/templates/_envQueue.tpl b/chart/templates/_envQueue.tpl index 061480b7..776ebba7 100644 --- a/chart/templates/_envQueue.tpl +++ b/chart/templates/_envQueue.tpl @@ -8,13 +8 @@ - {{- if .Values.secrets.mongoUrl.fromSecret }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl.secretName | quote }} - key: MONGO_URL - optional: false - {{- else }} - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - value: {{ .Values.secrets.mongoUrl.value }} - {{- end }} - {{- end }} + {{ include "datasetServer.mongo.url" . | nindent 2 }} diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index 7c592a14..c26333c5 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -8 +8 @@ Expand the name of the chart. -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- ((list $.Release.Name .Chart.Name) | join "-") | trunc 63 | trimSuffix "-" -}} @@ -15 +15 @@ Expand the name of the release. -{{- default .Release.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- default .Release.Name | trunc 63 | trimSuffix "-" }} @@ -24,0 +25 @@ Create chart name and version as used by the chart label. + @@ -26 +27 @@ Create chart name and version as used by the chart label. -Selector labels +Docker image management @@ -28,4 +29,64 @@ Selector labels -{{- define "selectorLabels" -}} -app.kubernetes.io/name: {{ include "name" . 
}} -app.kubernetes.io/instance: {{ .Release.Name }} -{{- end }} +{{- define "datasetsServer.images.image" -}} +{{- $registryName := .imageRoot.registry -}} +{{- $repositoryName := .imageRoot.repository -}} +{{- $separator := ":" -}} +{{- $termination := .imageRoot.tag | toString -}} +{{- if .global }} + {{- if and .global.imageRegistry .imageRoot.useGlobalRegistry }} + {{- $registryName = .global.imageRegistry -}} + {{- end -}} +{{- end -}} +{{- if .imageRoot.digest }} + {{- $separator = "@" -}} + {{- $termination = .imageRoot.digest | toString -}} +{{- end -}} +{{- printf "%s/%s%s%s" $registryName $repositoryName $separator $termination -}} +{{- end -}} + +{{- define "common.images.pullSecrets" -}} + {{- $pullSecrets := list }} + + {{- if .global }} + {{- range .global.imagePullSecrets -}} + {{- $pullSecrets = append $pullSecrets . -}} + {{- end -}} + {{- end -}} + + {{- range .images -}} + {{- range .pullSecrets -}} + {{- $pullSecrets = append $pullSecrets . -}} + {{- end -}} + {{- end -}} + + {{- if (not (empty $pullSecrets)) }} +imagePullSecrets: + {{- range $pullSecrets }} + - name: {{ . }} + {{- end }} + {{- end }} +{{- end -}} + +{{- define "reverseproxy.image" -}} +{{ include "datasetsServer.images.image" (dict "imageRoot" .Values.images.reverseProxy "global" .Values.global.huggingface) }} +{{- end -}} + +{{- define "jobs.mongodbMigration.image" -}} +{{ include "datasetsServer.images.image" (dict "imageRoot" .Values.images.jobs.mongodbMigration "global" .Values.global.huggingface) }} +{{- end -}} + +{{- define "services.admin.image" -}} +{{ include "datasetsServer.images.image" (dict "imageRoot" .Values.images.services.admin "global" .Values.global.huggingface) }} +{{- end -}} + +{{- define "services.api.image" -}} +{{ include "datasetsServer.images.image" (dict "imageRoot" .Values.images.services.api "global" .Values.global.huggingface) }} +{{- end -}} + +{{- define "workers.datasetsBased.image" -}} +{{ include "datasetsServer.images.image" (dict "imageRoot" .Values.images.workers.datasetsBased "global" .Values.global.huggingface) }} +{{- end -}} + +{{- define "image.imagePullSecrets" -}} +{{ include "common.images.pullSecrets" (dict "images" (list .Values.images) "global" .Values.global.huggingface) }} +{{- end -}} + @@ -36,6 +97,4 @@ Common labels -{{- define "labels" -}} -helm.sh/chart: {{ include "chart" . }} -{{ include "selectorLabels" . }} -{{- if .Chart.AppVersion }} -app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} -{{- end }} +{{- define "datasetServer.labels" -}} +app.kubernetes.io/name: {{ include "name" . }} +helm.sh/chart: {{ .Chart.Name }} +app.kubernetes.io/instance: {{ .Release.Name }} @@ -43,3 +101,0 @@ app.kubernetes.io/managed-by: {{ .Release.Service }} -release: {{ $.Release.Name | quote }} -heritage: {{ $.Release.Service | quote }} -chart: "{{ include "name" . }}" @@ -49,2 +105,2 @@ chart: "{{ include "name" . }}" -{{ include "labels" . }} -app: "{{ .Release.Name }}-reverse-proxy" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-reverse-proxy" @@ -54,2 +110,2 @@ app: "{{ .Release.Name }}-reverse-proxy" -{{ include "labels" . }} -app: "{{ .Release.Name }}-storage-admin" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-storage-admin" @@ -59,2 +115,2 @@ app: "{{ .Release.Name }}-storage-admin" -{{ include "labels" . }} -app: "{{ include "release" . }}-mongodb-migration" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . 
}}-mongodb-migration" @@ -64,2 +120,2 @@ app: "{{ include "release" . }}-mongodb-migration" -{{ include "labels" . }} -app: "{{ include "release" . }}-admin" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-admin" @@ -69,2 +125,2 @@ app: "{{ include "release" . }}-admin" -{{ include "labels" . }} -app: "{{ include "release" . }}-api" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-api" @@ -74 +130 @@ app: "{{ include "release" . }}-api" -{{ include "labels" . }} +{{ include "datasetServer.labels" . }} @@ -79 +135 @@ app: "{{ include "release" . }}-worker-config-names" -{{ include "labels" . }} +{{ include "datasetServer.labels" . }} @@ -84,2 +140,2 @@ app: "{{ include "release" . }}-worker-split-names" -{{ include "labels" . }} -app: "{{ include "release" . }}-worker-splits" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-worker-splits" @@ -89,2 +145,2 @@ app: "{{ include "release" . }}-worker-splits" -{{ include "labels" . }} -app: "{{ include "release" . }}-worker-first-rows" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-worker-first-rows" @@ -94,2 +150,2 @@ app: "{{ include "release" . }}-worker-first-rows" -{{ include "labels" . }} -app: "{{ include "release" . }}-worker-parquet-and-dataset-info" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-worker-parquet-and-dataset-info" @@ -99,2 +155,2 @@ app: "{{ include "release" . }}-worker-parquet-and-dataset-info" -{{ include "labels" . }} -app: "{{ include "release" . }}-worker-parquet" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-worker-parquet" @@ -104,2 +160,2 @@ app: "{{ include "release" . }}-worker-parquet" -{{ include "labels" . }} -app: "{{ include "release" . }}-worker-dataset-info" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-worker-dataset-info" @@ -109,2 +165,9 @@ app: "{{ include "release" . }}-worker-dataset-info" -{{ include "labels" . }} -app: "{{ include "release" . }}-worker-sizes" +{{ include "datasetServer.labels" . }} +app.kubernetes.io/component: "{{ include "name" . }}-worker-sizes" +{{- end -}} + +{{/* +Return the api ingress anotation +*/}} +{{- define "datasetsServer.ingress.annotations" -}} +{{ .Values.ingress.annotations | toYaml }} @@ -112,0 +176,6 @@ app: "{{ include "release" . }}-worker-sizes" +{{/* +Datasets Server base url +*/}} +{{- define "datasetsServer.ingress.hostname" -}} +{{ .Values.global.huggingface.ingress.subdomains.datasetsServer }}.{{ .Values.global.huggingface.ingress.domain }} +{{- end }} @@ -118 +187 @@ The assets base URL -{{- printf "https://%s/assets" .Values.hostname }} +{{- printf "https://%s/assets" (include "datasetsServer.ingress.hostname" .) }} @@ -152 +221 @@ See https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#a-a -{{- printf "http://%s-admin.%s.svc.cluster.local:80" ( include "release" . ) ( .Release.Namespace ) }} +{{- printf "http://%s-admin.%s.svc.cluster.local:80" ( include "name" . ) ( .Release.Namespace ) }} @@ -160 +229 @@ See https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#a-a -{{- printf "http://%s-api.%s.svc.cluster.local:80" ( include "release" . ) ( .Release.Namespace ) }} +{{- printf "http://%s-api.%s.svc.cluster.local:80" ( include "name" . 
) ( .Release.Namespace ) }} @@ -161,0 +231,25 @@ See https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#a-a + +{{/* +Return true if cert-manager required annotations for TLS signed +certificates are set in the Ingress annotations +Ref: https://cert-manager.io/docs/usage/ingress/#supported-annotations +Usage: +{{ include "common.ingress.certManagerRequest" ( dict "annotations" .Values.path.to.the.ingress.annotations ) }} +*/}} +{{- define "common.ingress.certManagerRequest" -}} +{{ if or (hasKey .annotations "cert-manager.io/cluster-issuer") (hasKey .annotations "cert-manager.io/issuer") (hasKey .annotations "kubernetes.io/tls-acme") }} + {{- true -}} +{{- end -}} +{{- end -}} + +{{/* +Return the HUB url +*/}} +{{- define "datasetsServer.hub.url" -}} +{{- if ne "" .Values.common.hfEndpoint -}} +{{ .Values.common.hfEndpoint | quote }} +{{- else -}} +{{- $hubName := ((list $.Release.Name "hub") | join "-") | trunc 63 | trimSuffix "-" -}} +http://{{ $hubName }} +{{- end -}} +{{- end -}} \ No newline at end of file diff --git a/chart/templates/_initContainerAssets.tpl b/chart/templates/_initContainerAssets.tpl index e220d25a..6b57aedc 100644 --- a/chart/templates/_initContainerAssets.tpl +++ b/chart/templates/_initContainerAssets.tpl @@ -7 +7 @@ - imagePullPolicy: IfNotPresent + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/_initContainerCache.tpl b/chart/templates/_initContainerCache.tpl index 6df8979b..149cd80a 100644 --- a/chart/templates/_initContainerCache.tpl +++ b/chart/templates/_initContainerCache.tpl @@ -7 +7 @@ - imagePullPolicy: IfNotPresent + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/_volumeData.tpl b/chart/templates/_volumeData.tpl index 72448740..34e22ae8 100644 --- a/chart/templates/_volumeData.tpl +++ b/chart/templates/_volumeData.tpl @@ -7 +7 @@ - claimName: {{ .Values.persistence.existingClaim | default (include "release" .) }} + claimName: {{ .Values.persistence.existingClaim | default (include "name" .) }} diff --git a/chart/templates/ingress.yaml b/chart/templates/ingress.yaml index 31547760..c484c005 100644 --- a/chart/templates/ingress.yaml +++ b/chart/templates/ingress.yaml @@ -4 +4,2 @@ metadata: - annotations: {{ toYaml .Values.ingress.annotations | nindent 4 }} + {{- $annotations := fromYaml (include "datasetsServer.ingress.annotations" .) }} + annotations: {{ toYaml $annotations | nindent 4 }} @@ -6 +7 @@ metadata: - name: {{ include "release" . }} + name: {{ include "name" . }} @@ -9 +9,0 @@ spec: - tls: {{ toYaml .Values.ingress.tls | nindent 4 }} @@ -11 +11 @@ spec: - - host: {{ .Values.hostname }} + - host: {{ include "datasetsServer.ingress.hostname" . }} @@ -16 +16 @@ spec: - name: "{{ include "release" . }}-reverse-proxy" + name: "{{ include "name" . }}-reverse-proxy" @@ -19 +19,13 @@ spec: - pathType: ImplementationSpecific + path: / + pathType: Prefix + {{- if include "common.ingress.certManagerRequest" ( dict "annotations" $annotations ) }} + tls: + - hosts: + - {{ include "datasetsServer.ingress.hostname" . }} + secretName: {{ printf "%s-tls" (include "datasetsServer.ingress.hostname" .) }} + {{- else if .Values.ingress.tls -}} + {{- with .Values.ingress.tls }} + tls: + {{- tpl (toYaml .) 
$ | nindent 4 }} + {{- end }} + {{- end }} diff --git a/chart/templates/jobs/mongodb-migration/_container.tpl b/chart/templates/jobs/mongodb-migration/_container.tpl index 5e52b507..a1c90051 100644 --- a/chart/templates/jobs/mongodb-migration/_container.tpl +++ b/chart/templates/jobs/mongodb-migration/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.jobs.mongodbMigration }} - imagePullPolicy: IfNotPresent + image: {{ include "jobs.mongodbMigration.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} @@ -15,13 +15 @@ - {{- if .Values.secrets.mongoUrl.fromSecret }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl.secretName | quote }} - key: MONGO_URL - optional: false - {{- else }} - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - value: {{ .Values.secrets.mongoUrl.value }} - {{- end }} - {{- end }} + {{ include "datasetServer.mongo.url" . | nindent 4 }} diff --git a/chart/templates/jobs/mongodb-migration/job.yaml b/chart/templates/jobs/mongodb-migration/job.yaml index d0021fdf..4c23212c 100644 --- a/chart/templates/jobs/mongodb-migration/job.yaml +++ b/chart/templates/jobs/mongodb-migration/job.yaml @@ -4 +4 @@ -{{- if .Values.dockerImage.jobs.mongodbMigration }} +{{- if .Values.images.jobs.mongodbMigration }} @@ -9 +9 @@ metadata: - name: "{{ include "release" . }}-job-mongodb-migration" + name: "{{ include "name" . }}-job-mongodb-migration" @@ -21 +21 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/pvc.yaml b/chart/templates/pvc.yaml index 3b602098..f4370320 100644 --- a/chart/templates/pvc.yaml +++ b/chart/templates/pvc.yaml @@ -5,2 +5,2 @@ metadata: - labels: {{ include "labels" . | nindent 4 }} - name: {{ include "release" . }} + labels: {{ include "datasetServer.labels" . | nindent 4 }} + name: {{ include "name" . }} @@ -13,0 +14 @@ spec: + {{ if ne "" .Values.persistence.storageClass }} @@ -14,0 +16 @@ spec: + {{ end }} diff --git a/chart/templates/reverse-proxy/_container.tpl b/chart/templates/reverse-proxy/_container.tpl index 8b7be35c..19bda663 100644 --- a/chart/templates/reverse-proxy/_container.tpl +++ b/chart/templates/reverse-proxy/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.reverseProxy }} - imagePullPolicy: IfNotPresent + image: {{ include "reverseproxy.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} @@ -35 +35 @@ - port: {{ .Values.reverseProxy.readinessPort }} + port: {{ .Values.reverseProxy.port }} @@ -38 +38 @@ - port: {{ .Values.reverseProxy.readinessPort }} + port: {{ .Values.reverseProxy.port }} diff --git a/chart/templates/reverse-proxy/configMap.yaml b/chart/templates/reverse-proxy/configMap.yaml index 3606cd50..603ff52c 100644 --- a/chart/templates/reverse-proxy/configMap.yaml +++ b/chart/templates/reverse-proxy/configMap.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-reverse-proxy" + name: "{{ include "name" . }}-reverse-proxy" diff --git a/chart/templates/reverse-proxy/deployment.yaml b/chart/templates/reverse-proxy/deployment.yaml index ea0edb41..3d0398be 100644 --- a/chart/templates/reverse-proxy/deployment.yaml +++ b/chart/templates/reverse-proxy/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-reverse-proxy" + name: "{{ include "name" . }}-reverse-proxy" @@ -28 +28 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . 
| nindent 6 }} @@ -37 +37 @@ spec: - name: "{{ include "release" . }}-reverse-proxy" + name: "{{ include "name" . }}-reverse-proxy" @@ -45 +45 @@ spec: - name: "{{ include "release" . }}-reverse-proxy" + name: "{{ include "name" . }}-reverse-proxy" @@ -53 +53 @@ spec: - name: "{{ include "release" . }}-reverse-proxy" + name: "{{ include "name" . }}-reverse-proxy" diff --git a/chart/templates/reverse-proxy/service.yaml b/chart/templates/reverse-proxy/service.yaml index 124d9d9d..d8449c67 100644 --- a/chart/templates/reverse-proxy/service.yaml +++ b/chart/templates/reverse-proxy/service.yaml @@ -7 +7 @@ metadata: - name: "{{ include "release" . }}-reverse-proxy" + name: "{{ include "name" . }}-reverse-proxy" diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl index 808c63f8..8ca3220e 100644 --- a/chart/templates/services/admin/_container.tpl +++ b/chart/templates/services/admin/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.services.admin }} - imagePullPolicy: IfNotPresent + image: {{ include "services.admin.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} @@ -39 +39 @@ - port: {{ .Values.admin.readinessPort }} + port: {{ .Values.admin.uvicornPort }} @@ -42 +42 @@ - port: {{ .Values.admin.readinessPort }} + port: {{ .Values.admin.uvicornPort }} diff --git a/chart/templates/services/admin/deployment.yaml b/chart/templates/services/admin/deployment.yaml index 5e8e6553..c701e356 100644 --- a/chart/templates/services/admin/deployment.yaml +++ b/chart/templates/services/admin/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-admin" + name: "{{ include "name" . }}-admin" @@ -25 +25 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/services/admin/service.yaml b/chart/templates/services/admin/service.yaml index e4855e62..8541ed4d 100644 --- a/chart/templates/services/admin/service.yaml +++ b/chart/templates/services/admin/service.yaml @@ -7 +7 @@ metadata: - name: "{{ include "release" . }}-admin" + name: "{{ include "name" . }}-admin" diff --git a/chart/templates/services/admin/servicemonitor.yaml b/chart/templates/services/admin/servicemonitor.yaml index de2f0d92..1d99843c 100644 --- a/chart/templates/services/admin/servicemonitor.yaml +++ b/chart/templates/services/admin/servicemonitor.yaml @@ -9 +9 @@ metadata: - name: "{{ include "release" . }}-admin" + name: "{{ include "name" . }}-admin" diff --git a/chart/templates/services/api/_container.tpl b/chart/templates/services/api/_container.tpl index 06b033d6..196695fc 100644 --- a/chart/templates/services/api/_container.tpl +++ b/chart/templates/services/api/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.services.api }} - imagePullPolicy: IfNotPresent + image: {{ include "services.api.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} @@ -33 +33 @@ - port: {{ .Values.api.readinessPort }} + port: {{ .Values.api.uvicornPort }} @@ -36 +36 @@ - port: {{ .Values.api.readinessPort }} + port: {{ .Values.api.uvicornPort }} diff --git a/chart/templates/services/api/deployment.yaml b/chart/templates/services/api/deployment.yaml index 1d055ee1..e77b45e3 100644 --- a/chart/templates/services/api/deployment.yaml +++ b/chart/templates/services/api/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-api" + name: "{{ include "name" . 
}}-api" @@ -25 +25 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/services/api/service.yaml b/chart/templates/services/api/service.yaml index f270ce4b..fde05c46 100644 --- a/chart/templates/services/api/service.yaml +++ b/chart/templates/services/api/service.yaml @@ -7 +7 @@ metadata: - name: "{{ include "release" . }}-api" + name: "{{ include "name" . }}-api" diff --git a/chart/templates/services/api/servicemonitor.yaml b/chart/templates/services/api/servicemonitor.yaml index 3bfe8cea..7ff28f71 100644 --- a/chart/templates/services/api/servicemonitor.yaml +++ b/chart/templates/services/api/servicemonitor.yaml @@ -9 +9 @@ metadata: - name: "{{ include "release" . }}-api" + name: "{{ include "name" . }}-api" diff --git a/chart/templates/storage-admin/_container.tpl b/chart/templates/storage-admin/_container.tpl index 317cff29..f9b97b42 100644 --- a/chart/templates/storage-admin/_container.tpl +++ b/chart/templates/storage-admin/_container.tpl @@ -7 +7 @@ - imagePullPolicy: IfNotPresent + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/storage-admin/deployment.yaml b/chart/templates/storage-admin/deployment.yaml index 5c21e304..ca1bf5d3 100644 --- a/chart/templates/storage-admin/deployment.yaml +++ b/chart/templates/storage-admin/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-storage-admin" + name: "{{ include "name" . }}-storage-admin" diff --git a/chart/templates/worker/config-names/_container.tpl b/chart/templates/worker/config-names/_container.tpl index 31b3951c..c7e1f76d 100644 --- a/chart/templates/worker/config-names/_container.tpl +++ b/chart/templates/worker/config-names/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/config-names/deployment.yaml b/chart/templates/worker/config-names/deployment.yaml index 4b8de337..a58f89ae 100644 --- a/chart/templates/worker/config-names/deployment.yaml +++ b/chart/templates/worker/config-names/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-config-names" + name: "{{ include "name" . }}-worker-config-names" @@ -22 +22 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/worker/dataset-info/_container.tpl b/chart/templates/worker/dataset-info/_container.tpl index 2b45ea87..f067ddac 100644 --- a/chart/templates/worker/dataset-info/_container.tpl +++ b/chart/templates/worker/dataset-info/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/dataset-info/deployment.yaml b/chart/templates/worker/dataset-info/deployment.yaml index a802449f..cb750c10 100644 --- a/chart/templates/worker/dataset-info/deployment.yaml +++ b/chart/templates/worker/dataset-info/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-dataset-info" + name: "{{ include "name" . 
}}-worker-dataset-info" @@ -22 +22 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index af8b6f96..56fc9d15 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/first-rows/deployment.yaml b/chart/templates/worker/first-rows/deployment.yaml index 277ea7af..ba4a72f9 100644 --- a/chart/templates/worker/first-rows/deployment.yaml +++ b/chart/templates/worker/first-rows/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-first-rows" + name: "{{ include "name" . }}-worker-first-rows" @@ -22 +22 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/worker/parquet-and-dataset-info/_container.tpl b/chart/templates/worker/parquet-and-dataset-info/_container.tpl index f964f01a..b35d5be2 100644 --- a/chart/templates/worker/parquet-and-dataset-info/_container.tpl +++ b/chart/templates/worker/parquet-and-dataset-info/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/parquet-and-dataset-info/deployment.yaml b/chart/templates/worker/parquet-and-dataset-info/deployment.yaml index 564693fe..8faa05e3 100644 --- a/chart/templates/worker/parquet-and-dataset-info/deployment.yaml +++ b/chart/templates/worker/parquet-and-dataset-info/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-parquet-and-dataset-info" + name: "{{ include "name" . }}-worker-parquet-and-dataset-info" @@ -22,3 +22,2 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} - initContainers: - {{ include "initContainerCache" . | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} + initContainers: {{ include "initContainerCache" . | nindent 8 }} diff --git a/chart/templates/worker/parquet/_container.tpl b/chart/templates/worker/parquet/_container.tpl index febf15ad..6ab0e886 100644 --- a/chart/templates/worker/parquet/_container.tpl +++ b/chart/templates/worker/parquet/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/parquet/deployment.yaml b/chart/templates/worker/parquet/deployment.yaml index dccd619e..980b3459 100644 --- a/chart/templates/worker/parquet/deployment.yaml +++ b/chart/templates/worker/parquet/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-parquet" + name: "{{ include "name" . }}-worker-parquet" @@ -22 +22 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . 
| nindent 6 }} diff --git a/chart/templates/worker/sizes/_container.tpl b/chart/templates/worker/sizes/_container.tpl index 4f22f6af..b67d95c4 100644 --- a/chart/templates/worker/sizes/_container.tpl +++ b/chart/templates/worker/sizes/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/sizes/deployment.yaml b/chart/templates/worker/sizes/deployment.yaml index 67452bba..5e0a1494 100644 --- a/chart/templates/worker/sizes/deployment.yaml +++ b/chart/templates/worker/sizes/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-sizes" + name: "{{ include "name" . }}-worker-sizes" @@ -22 +22 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/worker/split-names/_container.tpl b/chart/templates/worker/split-names/_container.tpl index f44d5666..e66e422c 100644 --- a/chart/templates/worker/split-names/_container.tpl +++ b/chart/templates/worker/split-names/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/split-names/deployment.yaml b/chart/templates/worker/split-names/deployment.yaml index baee269c..06ab6d03 100644 --- a/chart/templates/worker/split-names/deployment.yaml +++ b/chart/templates/worker/split-names/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-split-names" + name: "{{ include "name" . }}-worker-split-names" @@ -22 +22 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 1e77431f..6d2a4c20 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -6,2 +6,2 @@ - image: {{ .Values.dockerImage.workers.datasets_based }} - imagePullPolicy: {{ .Values.docker.pullPolicy }} + image: {{ include "workers.datasetsBased.image" . }} + imagePullPolicy: {{ .Values.images.pullPolicy }} diff --git a/chart/templates/worker/splits/deployment.yaml b/chart/templates/worker/splits/deployment.yaml index d0ec3103..0a264547 100644 --- a/chart/templates/worker/splits/deployment.yaml +++ b/chart/templates/worker/splits/deployment.yaml @@ -8 +8 @@ metadata: - name: "{{ include "release" . }}-worker-splits" + name: "{{ include "name" . }}-worker-splits" @@ -22,3 +22,2 @@ spec: - imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} - initContainers: - {{ include "initContainerCache" . | nindent 8 }} + {{- include "image.imagePullSecrets" . | nindent 6 }} + initContainers: {{ include "initContainerCache" . 
| nindent 8 }} diff --git a/chart/values.yaml b/chart/values.yaml index 0f1f70e8..2ac6369b 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -1 +1,42 @@ -# --- common parameters --- +global: + huggingface: + imageRegistry: "" + imagePullSecrets: [] + privateHub: + enabled: false + ingress: + domain: huggingface.co + subdomains: + datasetsServer: datasets-server + +images: + pullPolicy: IfNotPresent + pullSecrets: [] + reverseProxy: + useGlobalRegistry: false + registry: docker.io + repository: nginx + tag: "1.20" + jobs: + mongodbMigration: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-jobs-mongodb_migration + tag: sha-2f38593 + services: + admin: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-services-admin + tag: sha-2f38593 + api: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-services-api + tag: sha-2f38593 + workers: + datasetsBased: + registry: huggingface + useGlobalRegistry: false + repository: datasets-server-workers-datasets_based + tag: sha-2f38593 @@ -3 +44,8 @@ -hostname: "datasets-server.huggingface.co" + +common: + # URL of the HuggingFace Hub + hfEndpoint: "" + # Log level + logLevel: "INFO" + +# --- common parameters --- @@ -31 +79,2 @@ mongodb: - enabled: false + enabled: true + nameOverride: datasets-server-mongodb @@ -38,5 +87 @@ mongodb: -imagePullSecrets: [] - -docker: - pullPolicy: IfNotPresent - +# TODO : Update Makefile script to update this values @@ -80,5 +124,0 @@ assets: -common: - # URL of the HuggingFace Hub - hfEndpoint: "https://huggingface.co" - # Log level - logLevel: "INFO" @@ -98 +138 @@ mongodbMigration: - cpu: 1 + cpu: 0 @@ -100 +140 @@ mongodbMigration: - cpu: 1 + cpu: 0 @@ -110 +150 @@ storageAdmin: - cpu: 1 + cpu: 0 @@ -112 +152 @@ storageAdmin: - cpu: 1 + cpu: 0 @@ -114 +154 @@ storageAdmin: - type: NodePort + type: ClusterIP @@ -120,0 +161 @@ reverseProxy: + nameOverride: datasets-server-mongodb @@ -122 +163 @@ reverseProxy: - port: 80 + port: 8080 @@ -128 +168,0 @@ reverseProxy: - readinessPort: 80 @@ -132 +172 @@ reverseProxy: - cpu: 1 + cpu: 0 @@ -134 +174 @@ reverseProxy: - cpu: 1 + cpu: 0 @@ -136 +176 @@ reverseProxy: - type: NodePort + type: ClusterIP @@ -165 +205 @@ admin: - uvicornPort: 80 + uvicornPort: 8080 @@ -168 +207,0 @@ admin: - readinessPort: 80 @@ -172 +211 @@ admin: - cpu: 1 + cpu: 0 @@ -174 +213 @@ admin: - cpu: 1 + cpu: 0 @@ -176 +215 @@ admin: - type: NodePort + type: ClusterIP @@ -196 +235 @@ api: - uvicornPort: 80 + uvicornPort: 8080 @@ -199 +237,0 @@ api: - readinessPort: 80 @@ -203 +241 @@ api: - cpu: 1 + cpu: 0 @@ -205 +243 @@ api: - cpu: 1 + cpu: 0 @@ -207 +245 @@ api: - type: NodePort + type: ClusterIP @@ -253 +291 @@ splits: - cpu: 1 + cpu: 0 @@ -255 +293 @@ splits: - cpu: 1 + cpu: 0 @@ -278 +316 @@ firstRows: - cpu: 1 + cpu: 0 @@ -280 +318 @@ firstRows: - cpu: 1 + cpu: 0 @@ -306 +344 @@ parquetAndDatasetInfo: - cpu: 1 + cpu: 0 @@ -308 +346 @@ parquetAndDatasetInfo: - cpu: 1 + cpu: 0 @@ -320 +358 @@ parquet: - cpu: 1 + cpu: 0 @@ -322 +360 @@ parquet: - cpu: 1 + cpu: 0 @@ -334 +372 @@ datasetInfo: - cpu: 1 + cpu: 0 @@ -336 +374 @@ datasetInfo: - cpu: 1 + cpu: 0 @@ -348 +386 @@ sizes: - cpu: 1 + cpu: 0 @@ -350 +388 @@ sizes: - cpu: 1 + cpu: 0
fbce53c0d706e638a4aa7542c3e466ce50aff328
Sylvain Lesage
2023-01-30T13:46:28
fix: 🐛 add a missing default value for org name in admin/ (#722)
diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index 82cd50fc..1025004d 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -31 +31 @@ services: - ADMIN_HF_ORGANIZATION: ${ADMIN_HF_ORGANIZATION-} + ADMIN_HF_ORGANIZATION: ${ADMIN_HF_ORGANIZATION-huggingface}
d7caf78752601632e2d45ad36d9351f137298b8e
Sylvain Lesage
2023-01-30T13:46:17
feat: 🎸 update docker images (#723)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 51062cb2..be8a9de6 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-f70513e" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-2f38593" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-f70513e", - "api": "huggingface/datasets-server-services-api:sha-f70513e" + "admin": "huggingface/datasets-server-services-admin:sha-2f38593", + "api": "huggingface/datasets-server-services-api:sha-2f38593" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-30d5ce2" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-2f38593"
7cc7367140e0988df18125dd2e6dce03e806a304
Albert Villanova del Moral
2023-01-30T13:38:19
Trigger CI by PRs from forks (#713)
diff --git a/.github/workflows/chart.yml b/.github/workflows/chart.yml index ef433df4..5d6114f3 100644 --- a/.github/workflows/chart.yml +++ b/.github/workflows/chart.yml @@ -10,0 +11,4 @@ on: + pull_request: + paths: + - 'chart/**' + - '.github/workflows/chart.yml' diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index cc84356c..69dba2fc 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -16,0 +17,10 @@ on: + pull_request: + paths: + - 'e2e/**' + - 'chart/docker-images.yaml' + - 'chart/static-files/openapi.json' + - '.github/workflows/_e2e_tests.yml' + - '.github/workflows/_quality-python.yml' + - '.github/workflows/e2e.yml' + - 'tools/Python.mk' + - 'tools/docker-compose-datasets-server.yml' diff --git a/.github/workflows/j-migration.yml b/.github/workflows/j-migration.yml index b4a1c620..358f0e3e 100644 --- a/.github/workflows/j-migration.yml +++ b/.github/workflows/j-migration.yml @@ -11 +11,9 @@ on: - - '.github/workflows/j-mongodb_migration.yml' + - '.github/workflows/j-migration.yml' + - '.github/workflows/_quality-python.yml' + - '.github/workflows/_unit-tests-python.yml' + - 'tools/docker-compose-mongo.yml' + pull_request: + paths: + - 'jobs/mongodb_migration/**' + - 'libs/libcommon/**' + - '.github/workflows/j-migration.yml' diff --git a/.github/workflows/l-libcommon.yml b/.github/workflows/l-libcommon.yml index d1b50732..094c7077 100644 --- a/.github/workflows/l-libcommon.yml +++ b/.github/workflows/l-libcommon.yml @@ -13,0 +14,7 @@ on: + pull_request: + paths: + - 'libs/libcommon/**' + - '.github/workflows/l-libcommon.yml' + - '.github/workflows/_quality-python.yml' + - '.github/workflows/_unit-tests-python.yml' + - 'tools/docker-compose-mongo.yml' diff --git a/.github/workflows/openapi-spec.yml b/.github/workflows/openapi-spec.yml index 8eb686d1..c1feb493 100644 --- a/.github/workflows/openapi-spec.yml +++ b/.github/workflows/openapi-spec.yml @@ -10,0 +11,4 @@ on: + pull_request: + paths: + - 'chart/static-files/opanapi.json' + - '.github/workflows/openapi.yml' diff --git a/.github/workflows/s-admin.yml b/.github/workflows/s-admin.yml index a1c05c5e..57773bfe 100644 --- a/.github/workflows/s-admin.yml +++ b/.github/workflows/s-admin.yml @@ -14,0 +15,8 @@ on: + pull_request: + paths: + - 'libs/libcommon/**' + - 'services/admin/**' + - '.github/workflows/s-admin.yml' + - '.github/workflows/_quality-python.yml' + - '.github/workflows/_unit-tests-python.yml' + - 'tools/docker-compose-mongo.yml' diff --git a/.github/workflows/s-api.yml b/.github/workflows/s-api.yml index 0323f6c6..ef8d5ff4 100644 --- a/.github/workflows/s-api.yml +++ b/.github/workflows/s-api.yml @@ -14,0 +15,8 @@ on: + pull_request: + paths: + - 'libs/libcommon/**' + - 'services/api/**' + - '.github/workflows/s-api.yml' + - '.github/workflows/_quality-python.yml' + - '.github/workflows/_unit-tests-python.yml' + - 'tools/docker-compose-mongo.yml' diff --git a/.github/workflows/w-datasets_based.yml b/.github/workflows/w-datasets_based.yml index b2f6aed2..a454793f 100644 --- a/.github/workflows/w-datasets_based.yml +++ b/.github/workflows/w-datasets_based.yml @@ -15,0 +16,9 @@ on: + pull_request: + paths: + - 'libs/libcommon/**' + - 'workers/datasets_based/**' + - '.github/workflows/w-datasets_based.yml' + - '.github/workflows/_quality-python.yml' + - '.github/workflows/_unit-tests-python.yml' + - 'tools/docker-compose-mongo.yml' + - 'vendors/'
2f38593da9a8ed577ce00e4f21c5b1293f836c54
Sylvain Lesage
2023-01-30T13:28:12
fix: 🐛 don't check if dataset is supported when we know it is (#720)
diff --git a/.github/workflows/_unit-tests-python.yml b/.github/workflows/_unit-tests-python.yml index 89b1a9da..6b29be44 100644 --- a/.github/workflows/_unit-tests-python.yml +++ b/.github/workflows/_unit-tests-python.yml @@ -62 +62 @@ jobs: - fail_ci_if_error: true + fail_ci_if_error: false diff --git a/libs/libcommon/src/libcommon/dataset.py b/libs/libcommon/src/libcommon/dataset.py index f6a1efe6..50abc75a 100644 --- a/libs/libcommon/src/libcommon/dataset.py +++ b/libs/libcommon/src/libcommon/dataset.py @@ -219 +219 @@ def get_supported_datasets(hf_endpoint: str, hf_token: Optional[str] = None) -> - return [d.id for d in HfApi(endpoint=hf_endpoint, token=hf_token).list_datasets() if d.id is not None] + return [d.id for d in HfApi(endpoint=hf_endpoint, token=hf_token).list_datasets() if d.id and not d.private] diff --git a/libs/libcommon/src/libcommon/operations.py b/libs/libcommon/src/libcommon/operations.py index 10ffb72f..07c0674a 100644 --- a/libs/libcommon/src/libcommon/operations.py +++ b/libs/libcommon/src/libcommon/operations.py @@ -28,0 +29 @@ def update_dataset( + do_check_support: bool = True, @@ -39,0 +41 @@ def update_dataset( + do_check_support (bool, optional): Check if the dataset is supported. Defaults to True. @@ -46 +48,2 @@ def update_dataset( - check_support(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) + if do_check_support: + check_support(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) diff --git a/services/admin/src/admin/routes/backfill.py b/services/admin/src/admin/routes/backfill.py index f9f51160..1b449929 100644 --- a/services/admin/src/admin/routes/backfill.py +++ b/services/admin/src/admin/routes/backfill.py @@ -44,0 +45 @@ def create_backfill_endpoint( + do_check_support=False,
4414d57b95bcd4ff0ac519e6893a9e58e5069058
Albert Villanova del Moral
2023-01-30T09:14:24
Update datasets to 2.9.0 (#715)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 8d7ccee7..51062cb2 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-f70513e" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-30d5ce2" diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index c1222cae..80a236e5 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -883 +883 @@ name = "datasets" -version = "2.8.0" +version = "2.9.0" @@ -889,2 +889,2 @@ files = [ - {file = "datasets-2.8.0-py3-none-any.whl", hash = "sha256:f36cb362bb5587659bab18e594b6d25d9d28486d735a571319c82efeb5a4e5df"}, - {file = "datasets-2.8.0.tar.gz", hash = "sha256:a843b69593914071f921fc1086fde939f30a63415a34cdda5db3c0acdd58aff2"}, + {file = "datasets-2.9.0-py3-none-any.whl", hash = "sha256:f1aa5b98959cddb30f5077448204c8ce4235a4f1c8ec2473920660ebd6fc304f"}, + {file = "datasets-2.9.0.tar.gz", hash = "sha256:c82458d635539b5a5dbed0fba8837006dfc3c213a5bcc00e18a67789f0f0f16f"}, @@ -912 +912 @@ xxhash = "*" -apache-beam = ["apache-beam (>=2.26.0)"] +apache-beam = ["apache-beam (>=2.26.0,<2.44.0)"] @@ -915 +915 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "black (>=22.0,<23.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] +dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "black (>=22.0,<23.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] @@ -922 +922 @@ tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] +tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] @@ -5439 +5439 @@ python-versions = "3.9.15" -content-hash = "d813a3067825668630455a2c36c1f9b52d3704431cc41bc4860400f729e570de" +content-hash = "1d75ae381d53e8abcc1896f6f33c71afadc51f9ea0d809e9effadacf716fa628" diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index f638ed71..76e98168 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = 
["audio", "vision"], version = "~2.8.0" } +datasets = { extras = ["audio", "vision"], version = "~2.9.0" }
c6c4872d5c588f9d752d746fd4f688405c3ce3b2
Sylvain Lesage
2023-01-27T22:32:38
ci: 🎡 build and push the docker images only on push to main (#717)
diff --git a/.github/workflows/_build_push_docker_hub.yml b/.github/workflows/build_push_docker_hub.yml
similarity index 62%
rename from .github/workflows/_build_push_docker_hub.yml
rename to .github/workflows/build_push_docker_hub.yml
index 0a68ba78..19e91ad7 100644
--- a/.github/workflows/_build_push_docker_hub.yml
+++ b/.github/workflows/build_push_docker_hub.yml
@@ -4 +4 @@
-name: Build and push service docker image to public Docker Hub
+name: Build and push docker images to public Docker Hub
@@ -6,13 +6,4 @@ on:
-  workflow_call:
-    inputs:
-      directory:
-        required: true
-        type: string
-      project:
-        required: true
-        type: string
-    secrets:
-      dockerhub-username:
-        required: true
-      dockerhub-password:
-        required: true
+  workflow_dispatch:
+  push:
+    branches:
+      - main
@@ -22,0 +14,11 @@ jobs:
+    strategy:
+      matrix:
+        include:
+          - directory: jobs
+            project: mongodb_migration
+          - directory: services
+            project: admin
+          - directory: services
+            project: api
+          - directory: workers
+            project: datasets_based
@@ -35,2 +37,2 @@ jobs:
-          username: ${{ secrets.dockerhub-username }}
-          password: ${{ secrets.dockerhub-password }}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
@@ -41 +43 @@ jobs:
-          images: ${{ env.repository-prefix }}${{ inputs.directory }}-${{ inputs.project }}
+          images: ${{ env.repository-prefix }}${{ matrix.directory }}-${{ matrix.project }}
@@ -48 +50 @@ jobs:
-          file: ${{ inputs.directory }}/${{ inputs.project }}/Dockerfile
+          file: ${{ matrix.directory }}/${{ matrix.project }}/Dockerfile
@@ -54,2 +56,2 @@ jobs:
-          cache-from: type=registry,ref=${{ env.repository-prefix }}${{ inputs.directory }}-${{ inputs.project }}:buildcache
-          cache-to: type=registry,ref=${{ env.repository-prefix }}${{ inputs.directory }}-${{ inputs.project }}:buildcache,mode=max
+          cache-from: type=registry,ref=${{ env.repository-prefix }}${{ matrix.directory }}-${{ matrix.project }}:buildcache
+          cache-to: type=registry,ref=${{ env.repository-prefix }}${{ matrix.directory }}-${{ matrix.project }}:buildcache,mode=max
diff --git a/.github/workflows/j-migration-build-docker.yml b/.github/workflows/j-migration-build-docker.yml
deleted file mode 100644
index a3ec4ff3..00000000
--- a/.github/workflows/j-migration-build-docker.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 The HuggingFace Authors.
-
-name: jobs/mongodb_migration
-on:
-  workflow_dispatch:
-  push:
-    paths:
-      - 'jobs/mongodb_migration/Dockerfile'
-      - 'jobs/mongodb_migration/src/**'
-      - 'jobs/mongodb_migration/poetry.lock'
-      - 'jobs/mongodb_migration/pyproject.toml'
-      - 'libs/libcommon/**'
-      - '.github/workflows/j-mongodb_migration-build-docker.yml'
-      - '.github/workflows/_build_push_docker_hub.yml'
-jobs:
-  docker:
-    uses: ./.github/workflows/_build_push_docker_hub.yml
-    with:
-      directory: jobs
-      project: mongodb_migration
-    secrets:
-      dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
-      dockerhub-password: ${{ secrets.DOCKERHUB_PASSWORD }}
diff --git a/.github/workflows/s-admin-build-docker.yml b/.github/workflows/s-admin-build-docker.yml
deleted file mode 100644
index ce5298cd..00000000
--- a/.github/workflows/s-admin-build-docker.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 The HuggingFace Authors.
-
-name: services/admin
-on:
-  workflow_dispatch:
-  push:
-    paths:
-      - 'libs/libcommon/**'
-      - 'services/admin/Dockerfile'
-      - 'services/admin/src/**'
-      - 'services/admin/poetry.lock'
-      - 'services/admin/pyproject.toml'
-      - '.github/workflows/s-admin-build-docker.yml'
-      - '.github/workflows/_build_push_docker_hub.yml'
-jobs:
-  docker:
-    uses: ./.github/workflows/_build_push_docker_hub.yml
-    with:
-      directory: services
-      project: admin
-    secrets:
-      dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
-      dockerhub-password: ${{ secrets.DOCKERHUB_PASSWORD }}
diff --git a/.github/workflows/s-api-build-docker.yml b/.github/workflows/s-api-build-docker.yml
deleted file mode 100644
index 913b5648..00000000
--- a/.github/workflows/s-api-build-docker.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 The HuggingFace Authors.
-
-name: services/api
-on:
-  workflow_dispatch:
-  push:
-    paths:
-      - 'libs/libcommon/**'
-      - 'services/api/Dockerfile'
-      - 'services/api/src/**'
-      - 'services/api/poetry.lock'
-      - 'services/api/pyproject.toml'
-      - '.github/workflows/s-api-build-docker.yml'
-      - '.github/workflows/_build_push_docker_hub.yml'
-jobs:
-  docker:
-    uses: ./.github/workflows/_build_push_docker_hub.yml
-    with:
-      directory: services
-      project: api
-    secrets:
-      dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
-      dockerhub-password: ${{ secrets.DOCKERHUB_PASSWORD }}
diff --git a/.github/workflows/w-datasets_based-build-docker.yml b/.github/workflows/w-datasets_based-build-docker.yml
deleted file mode 100644
index b7a55326..00000000
--- a/.github/workflows/w-datasets_based-build-docker.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 The HuggingFace Authors.
-
-name: workers/datasets_based
-on:
-  workflow_dispatch:
-  push:
-    paths:
-      - 'libs/libcommon/**'
-      - 'workers/datasets_based/Dockerfile'
-      - 'workers/datasets_based/src/**'
-      - 'workers/datasets_based/poetry.lock'
-      - 'workers/datasets_based/pyproject.toml'
-      - '.github/workflows/w-datasets_based-build-docker.yml'
-      - '.github/workflows/_build_push_docker_hub.yml'
-      - 'vendors/'
-jobs:
-  docker:
-    uses: ./.github/workflows/_build_push_docker_hub.yml
-    with:
-      directory: workers
-      project: datasets_based
-    secrets:
-      dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
-      dockerhub-password: ${{ secrets.DOCKERHUB_PASSWORD }}
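The consolidated workflow swaps four single-purpose trigger files for one build matrix, so adding a new image means adding a two-line matrix entry instead of a new workflow file. A minimal Python sketch of what the matrix expands to; the "huggingface/datasets-server-" prefix is an assumption inferred from the image tags in chart/docker-images.yaml, not read from the workflow's env block:

# Minimal sketch: expand the workflow's build matrix into image names and
# Dockerfile paths. REPOSITORY_PREFIX is an assumption for illustration.
MATRIX = [
    {"directory": "jobs", "project": "mongodb_migration"},
    {"directory": "services", "project": "admin"},
    {"directory": "services", "project": "api"},
    {"directory": "workers", "project": "datasets_based"},
]
REPOSITORY_PREFIX = "huggingface/datasets-server-"  # assumption

for entry in MATRIX:
    image = f"{REPOSITORY_PREFIX}{entry['directory']}-{entry['project']}"
    dockerfile = f"{entry['directory']}/{entry['project']}/Dockerfile"
    print(f"{image} (built from {dockerfile})")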
85e7aac92064e6d2359b029595c316b78f68bd78
Sylvain Lesage
2023-01-27T22:21:03
ci: 🎡 build the images before running the e2e tests (#716)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml
index 8d8493f8..cdc7ea4d 100644
--- a/.github/workflows/_e2e_tests.yml
+++ b/.github/workflows/_e2e_tests.yml
@@ -7,4 +6,0 @@ on:
-    inputs:
-      config-file:
-        required: true
-        type: string
@@ -18,17 +13,0 @@ jobs:
-  get-config:
-    runs-on: ubuntu-latest
-    outputs:
-      dockerConfig: ${{ steps.set-var.outputs.dockerConfig }}
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          path: './'
-      - name: Get docker images
-        id: set-var
-        run: |
-          # from https://trstringer.com/github-actions-multiline-strings/
-          dockerConfig=`cat ${{ inputs.config-file }}`
-          # end of optional handling for multi line json
-          echo "dockerConfig<<EOF" >> $GITHUB_OUTPUT
-          echo "$dockerConfig" >> $GITHUB_OUTPUT
-          echo "EOF" >> $GITHUB_OUTPUT
@@ -36 +14,0 @@ jobs:
-    needs: get-config
@@ -43,11 +21,5 @@ jobs:
-      - uses: actions/checkout@v3
-      - name: Install poetry
-        run: pipx install poetry==${{ env.poetry-version }}
-      - name: Use Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ env.python-version }}
-          cache: 'poetry'
-          cache-dependency-path: |
-            ${{ env.working-directory }}/poetry.lock
-      - name: Launch the services
+      -
+        name: Checkout
+        uses: actions/checkout@v3
+      -
+        name: Build and launch the services (no cache)
@@ -55,4 +26,0 @@ jobs:
-          IMAGE_REVERSE_PROXY: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.reverseProxy}}"
-          IMAGE_SERVICE_ADMIN: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.admin}}"
-          IMAGE_SERVICE_API: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.api}}"
-          IMAGE_WORKER_DATASETS_BASED: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.workers.datasets_based}}"
@@ -72,3 +40,15 @@ jobs:
-        working-directory: tools
-        run: docker compose -f ./docker-compose-datasets-server.yml up -d
-      - name: Install dependencies
+        run: docker compose -f docker-compose-datasets-server.yml up -d
+        working-directory: ./tools
+      -
+        name: Install poetry
+        run: pipx install poetry==${{ env.poetry-version }}
+      -
+        name: Use Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.python-version }}
+          cache: 'poetry'
+          cache-dependency-path: |
+            ${{ env.working-directory }}/poetry.lock
+      -
+        name: Install dependencies
@@ -78 +58,2 @@ jobs:
-      - name: End-to-end tests
+      -
+        name: End-to-end tests
diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index 75b8d647..cc84356c 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -24,2 +23,0 @@ jobs:
-    with:
-      config-file: chart/docker-images.yaml
diff --git a/Makefile b/Makefile
index 11efd315..5a2af974 100644
--- a/Makefile
+++ b/Makefile
@@ -10 +10 @@ DOCKER_COMPOSE := ./tools/docker-compose-datasets-server.yml
-DOCKER_IMAGES := ./chart/docker-images.yaml
+# DOCKER_IMAGES := ./chart/docker-images.yaml
@@ -12 +12 @@ DOCKER_IMAGES := ./chart/docker-images.yaml
-include tools/DockerRemoteImages.mk
+# include tools/DockerRemoteImages.mk
diff --git a/e2e/Makefile b/e2e/Makefile
index 7b97af7c..b12d45d5 100644
--- a/e2e/Makefile
+++ b/e2e/Makefile
@@ -19 +19 @@ DOCKER_COMPOSE := ../tools/docker-compose-datasets-server.yml
-DOCKER_IMAGES := ../chart/docker-images.yaml
+# DOCKER_IMAGES := ../chart/docker-images.yaml
@@ -23 +23 @@ include ../tools/PythonTest.mk
-include ../tools/DockerRemoteImages.mk
+# include ../tools/DockerRemoteImages.mk
diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml
index 4d7b5b28..82cd50fc 100644
--- a/tools/docker-compose-datasets-server.yml
+++ b/tools/docker-compose-datasets-server.yml
@@ -4 +4,2 @@ services:
-    image: ${IMAGE_REVERSE_PROXY?IMAGE_REVERSE_PROXY env var must be provided}
+    image: docker.io/nginx:1.20
+    # image: ${IMAGE_REVERSE_PROXY?IMAGE_REVERSE_PROXY env var must be provided}
@@ -21,4 +22,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: services/admin/Dockerfile
-    image: ${IMAGE_SERVICE_ADMIN?IMAGE_SERVICE_ADMIN env var must be provided}
+    build:
+      context: ..
+      dockerfile: services/admin/Dockerfile
+    # image: ${IMAGE_SERVICE_ADMIN?IMAGE_SERVICE_ADMIN env var must be provided}
@@ -48,4 +49,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: services/api/Dockerfile
-    image: ${IMAGE_SERVICE_API?IMAGE_SERVICE_API env var must be provided}
+    build:
+      context: ..
+      dockerfile: services/api/Dockerfile
+    # image: ${IMAGE_SERVICE_API?IMAGE_SERVICE_API env var must be provided}
@@ -73,4 +74,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/datasets_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -90,4 +91,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/datasets_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -107,4 +108,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/datasets_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -124,4 +125,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/datasets_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -151,4 +152,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/dataset_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -176,4 +177,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/dataset_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -189,4 +190,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/dataset_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -202,4 +203,4 @@ services:
-    # build:
-    #   context: ..
-    #   dockerfile: workers/dataset_based/Dockerfile
-    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    build:
+      context: ..
+      dockerfile: workers/datasets_based/Dockerfile
+    # image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
@@ -215 +216 @@ services:
-    image: mongo
+    image: docker.io/mongo
9d22c9676248f4c39b97bc8683db9f7a633f53b6
Albert Villanova del Moral
2023-01-27T13:48:56
Update poetry lock file format to 2.0 (#714)
6e63de2ba027a593095fda810eba4e672aa09976
Sylvain Lesage
2023-01-27T10:20:52
feat: 🎸 add a /backfill admin endpoint (#708)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml
index 047546f5..a0809933 100644
--- a/chart/docker-images.yaml
+++ b/chart/docker-images.yaml
@@ -5 +5 @@
-    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-48dee06"
+    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-1c9e36d"
@@ -8,2 +8,2 @@
-    "admin": "huggingface/datasets-server-services-admin:sha-3de1315",
-    "api": "huggingface/datasets-server-services-api:sha-3de1315"
+    "admin": "huggingface/datasets-server-services-admin:sha-bb27740",
+    "api": "huggingface/datasets-server-services-api:sha-1c9e36d"
@@ -12 +12 @@
-    "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-3de1315"
+    "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-1c9e36d"
diff --git a/libs/libcommon/src/libcommon/dataset.py b/libs/libcommon/src/libcommon/dataset.py
index 3d4af686..f6a1efe6 100644
--- a/libs/libcommon/src/libcommon/dataset.py
+++ b/libs/libcommon/src/libcommon/dataset.py
@@ -215,0 +216,4 @@ def check_support(
+
+
+def get_supported_datasets(hf_endpoint: str, hf_token: Optional[str] = None) -> list[str]:
+    return [d.id for d in HfApi(endpoint=hf_endpoint, token=hf_token).list_datasets() if d.id is not None]
diff --git a/libs/libcommon/src/libcommon/operations.py b/libs/libcommon/src/libcommon/operations.py
index 8fcf0949..10ffb72f 100644
--- a/libs/libcommon/src/libcommon/operations.py
+++ b/libs/libcommon/src/libcommon/operations.py
@@ -11 +11 @@ from libcommon.processing_graph import ProcessingStep
-from libcommon.queue import Queue
+from libcommon.queue import Priority, Queue
@@ -27,0 +28 @@ def update_dataset(
+    priority: Priority = Priority.NORMAL,
@@ -37,0 +39 @@ def update_dataset(
+        priority (Priority, optional): The priority of the job. Defaults to Priority.NORMAL.
@@ -48 +50 @@ def update_dataset(
-    Queue(type=init_processing_step.job_type).upsert_job(dataset=dataset, force=force)
+    Queue(type=init_processing_step.job_type).upsert_job(dataset=dataset, force=force, priority=priority)
@@ -70,0 +73 @@ def move_dataset(
+    priority: Priority = Priority.NORMAL,
@@ -84,0 +88 @@ def move_dataset(
+        priority (Priority, optional): The priority of the job. Defaults to Priority.NORMAL.
@@ -97,0 +102 @@ def move_dataset(
+        priority=priority,
@@ -144,0 +150,2 @@ def check_in_process(
+        force=False,
+        priority=Priority.NORMAL,
@@ -155,0 +163,2 @@ def check_in_process(
+            force=False,
+            priority=Priority.NORMAL,
diff --git a/libs/libcommon/src/libcommon/queue.py b/libs/libcommon/src/libcommon/queue.py
index 5c85dbfb..7a8de583 100644
--- a/libs/libcommon/src/libcommon/queue.py
+++ b/libs/libcommon/src/libcommon/queue.py
@@ -73,0 +74 @@ class JobInfo(TypedDict):
+    priority: Priority
@@ -381,0 +383 @@ class Queue:
+            "priority": next_waiting_job.priority,
diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py
index 6a7665bd..c9788837 100644
--- a/services/admin/src/admin/app.py
+++ b/services/admin/src/admin/app.py
@@ -13,0 +14 @@ from admin.prometheus import Prometheus
+from admin.routes.backfill import create_backfill_endpoint
@@ -67,0 +69,13 @@ def create_app() -> Starlette:
+        + [
+            Route(
+                "/backfill",
+                endpoint=create_backfill_endpoint(
+                    init_processing_steps=app_config.processing_graph.graph.get_first_steps(),
+                    hf_endpoint=app_config.common.hf_endpoint,
+                    hf_token=app_config.common.hf_token,
+                    external_auth_url=app_config.external_auth_url,
+                    organization=app_config.admin.hf_organization,
+                ),
+                methods=["POST"],
+            )
+        ]
diff --git a/services/admin/src/admin/routes/backfill.py b/services/admin/src/admin/routes/backfill.py
new file mode 100644
index 00000000..f9f51160
--- /dev/null
+++ b/services/admin/src/admin/routes/backfill.py
@@ -0,0 +1,59 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 The HuggingFace Authors.
+
+import logging
+from typing import Optional
+
+from libcommon.dataset import get_supported_datasets
+from libcommon.operations import update_dataset
+from libcommon.processing_graph import ProcessingStep
+from libcommon.queue import Priority
+from starlette.requests import Request
+from starlette.responses import Response
+
+from admin.authentication import auth_check
+from admin.utils import (
+    AdminCustomError,
+    Endpoint,
+    UnexpectedError,
+    get_json_admin_error_response,
+    get_json_ok_response,
+)
+
+
+def create_backfill_endpoint(
+    init_processing_steps: list[ProcessingStep],
+    hf_endpoint: str,
+    hf_token: Optional[str] = None,
+    external_auth_url: Optional[str] = None,
+    organization: Optional[str] = None,
+) -> Endpoint:
+    async def backfill_endpoint(request: Request) -> Response:
+        try:
+            logging.info("/backfill")
+
+            # if auth_check fails, it will raise an exception that will be caught below
+            auth_check(external_auth_url=external_auth_url, request=request, organization=organization)
+            for dataset in get_supported_datasets(hf_endpoint=hf_endpoint, hf_token=hf_token):
+                update_dataset(
+                    dataset=dataset,
+                    init_processing_steps=init_processing_steps,
+                    hf_endpoint=hf_endpoint,
+                    hf_token=hf_token,
+                    force=False,
+                    priority=Priority.LOW,
+                )
+            # ^ we simply ask an update for all the datasets on the Hub, supported by the datasets-server
+            # we could be more precise and only ask for updates for the datasets that have some missing
+            # cache entries, but it's not easy to check.
+            # Also: we could try to do a batch update of the database, instead of one query per dataset
+            return get_json_ok_response(
+                {"status": "ok"},
+                max_age=0,
+            )
+        except AdminCustomError as e:
+            return get_json_admin_error_response(e, max_age=0)
+        except Exception:
+            return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=0)
+
+    return backfill_endpoint
diff --git a/services/api/src/api/routes/webhook.py b/services/api/src/api/routes/webhook.py
index 67bdfaff..42715ec5 100644
--- a/services/api/src/api/routes/webhook.py
+++ b/services/api/src/api/routes/webhook.py
@@ -10,0 +11 @@ from libcommon.processing_graph import ProcessingStep
+from libcommon.queue import Priority
@@ -73,0 +75 @@ def process_payload(
+        priority=Priority.NORMAL,
@@ -87,0 +90 @@ def process_payload(
+        priority=Priority.NORMAL,
diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py
index 68ce0078..670d2030 100644
--- a/workers/datasets_based/src/datasets_based/worker.py
+++ b/workers/datasets_based/src/datasets_based/worker.py
@@ -13 +13 @@ from libcommon.processing_graph import ProcessingStep
-from libcommon.queue import JobInfo, Queue, Status
+from libcommon.queue import JobInfo, Priority, Queue, Status
@@ -109 +109 @@ class Worker(ABC):
-        and the force flag.
+        the force flag, and the priority level.
@@ -120,0 +121 @@ class Worker(ABC):
+    priority: Priority
@@ -145,0 +147 @@ class Worker(ABC):
+        self.priority = job_info["priority"]
@@ -381,0 +384 @@ class Worker(ABC):
+                priority=self.priority,
diff --git a/workers/datasets_based/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py
index f9992eba..ac170948 100644
--- a/workers/datasets_based/tests/test_worker.py
+++ b/workers/datasets_based/tests/test_worker.py
@@ -6 +6 @@ from libcommon.processing_graph import ProcessingGraph, ProcessingStep
-from libcommon.queue import Queue, Status, _clean_queue_database
+from libcommon.queue import Priority, Queue, Status, _clean_queue_database
@@ -65,0 +66 @@ def test_compare_major_version(
+        "priority": Priority.NORMAL,
@@ -92,0 +94 @@ def test_should_skip_job(
+        "priority": Priority.NORMAL,
@@ -121,0 +124 @@ def test_check_type(
+        "priority": Priority.NORMAL,
@@ -144,0 +148 @@ def test_check_type(
+        "priority": Priority.NORMAL,
@@ -168,0 +173 @@ def test_create_children_jobs() -> None:
+        "priority": Priority.LOW,
@@ -182,0 +188 @@ def test_create_children_jobs() -> None:
+    assert child_dataset_jobs[0]["priority"] is Priority.LOW.value
@@ -187,0 +194 @@ def test_create_children_jobs() -> None:
+    assert child_config_jobs[0]["priority"] is Priority.LOW.value
@@ -190 +197,4 @@ def test_create_children_jobs() -> None:
-    assert all(job["dataset"] == "dataset" and job["config"] == "config" for job in child_split_jobs)
+    assert all(
+        job["dataset"] == "dataset" and job["config"] == "config" and job["priority"] == Priority.LOW.value
+        for job in child_split_jobs
+    )
diff --git a/workers/datasets_based/tests/test_worker_factory.py b/workers/datasets_based/tests/test_worker_factory.py
index 468841df..bb8d56e4 100644
--- a/workers/datasets_based/tests/test_worker_factory.py
+++ b/workers/datasets_based/tests/test_worker_factory.py
@@ -6,0 +7 @@ import pytest
+from libcommon.queue import Priority
@@ -34,0 +36 @@ def test_create_worker(app_config: AppConfig, job_type: str, expected_worker: Op
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test__datasets_based_worker.py b/workers/datasets_based/tests/workers/test__datasets_based_worker.py
index c9b946b0..3995803e 100644
--- a/workers/datasets_based/tests/workers/test__datasets_based_worker.py
+++ b/workers/datasets_based/tests/workers/test__datasets_based_worker.py
@@ -9,0 +10 @@ import pytest
+from libcommon.queue import Priority
@@ -49,0 +51 @@ def get_worker(
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_config_names.py b/workers/datasets_based/tests/workers/test_config_names.py
index e90f4efd..3a58469b 100644
--- a/workers/datasets_based/tests/workers/test_config_names.py
+++ b/workers/datasets_based/tests/workers/test_config_names.py
@@ -8,0 +9 @@ from libcommon.exceptions import CustomError
+from libcommon.queue import Priority
@@ -29,0 +31 @@ def get_worker(
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_dataset_info.py b/workers/datasets_based/tests/workers/test_dataset_info.py
index 62f98d6d..d569e14d 100644
--- a/workers/datasets_based/tests/workers/test_dataset_info.py
+++ b/workers/datasets_based/tests/workers/test_dataset_info.py
@@ -7,0 +8 @@ import pytest
+from libcommon.queue import Priority
@@ -32,0 +34 @@ def get_worker(dataset: str, app_config: AppConfig, force: bool = False) -> Data
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_first_rows.py b/workers/datasets_based/tests/workers/test_first_rows.py
index c734bd00..bc122814 100644
--- a/workers/datasets_based/tests/workers/test_first_rows.py
+++ b/workers/datasets_based/tests/workers/test_first_rows.py
@@ -9,0 +10 @@ from libcommon.exceptions import CustomError
+from libcommon.queue import Priority
@@ -33,0 +35 @@ def get_worker(
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_parquet.py b/workers/datasets_based/tests/workers/test_parquet.py
index 7e644e94..b157b98e 100644
--- a/workers/datasets_based/tests/workers/test_parquet.py
+++ b/workers/datasets_based/tests/workers/test_parquet.py
@@ -7,0 +8 @@ import pytest
+from libcommon.queue import Priority
@@ -32,0 +34 @@ def get_worker(dataset: str, app_config: AppConfig, force: bool = False) -> Parq
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_parquet_and_dataset_info.py b/workers/datasets_based/tests/workers/test_parquet_and_dataset_info.py
index e7239bda..696300a2 100644
--- a/workers/datasets_based/tests/workers/test_parquet_and_dataset_info.py
+++ b/workers/datasets_based/tests/workers/test_parquet_and_dataset_info.py
@@ -11,0 +12 @@ from libcommon.exceptions import CustomError
+from libcommon.queue import Priority
@@ -67,0 +69 @@ def get_worker(
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_sizes.py b/workers/datasets_based/tests/workers/test_sizes.py
index a132baae..ec010f33 100644
--- a/workers/datasets_based/tests/workers/test_sizes.py
+++ b/workers/datasets_based/tests/workers/test_sizes.py
@@ -7,0 +8 @@ import pytest
+from libcommon.queue import Priority
@@ -32,0 +34 @@ def get_worker(dataset: str, app_config: AppConfig, force: bool = False) -> Size
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_split_names.py b/workers/datasets_based/tests/workers/test_split_names.py
index 1d2c5f7b..0c030ff3 100644
--- a/workers/datasets_based/tests/workers/test_split_names.py
+++ b/workers/datasets_based/tests/workers/test_split_names.py
@@ -8,0 +9 @@ from libcommon.exceptions import CustomError
+from libcommon.queue import Priority
@@ -30,0 +32 @@ def get_worker(
+        "priority": Priority.NORMAL,
diff --git a/workers/datasets_based/tests/workers/test_splits.py b/workers/datasets_based/tests/workers/test_splits.py
index bcfca473..66bb9523 100644
--- a/workers/datasets_based/tests/workers/test_splits.py
+++ b/workers/datasets_based/tests/workers/test_splits.py
@@ -8,0 +9 @@ from libcommon.exceptions import CustomError
+from libcommon.queue import Priority
@@ -29,0 +31 @@ def get_worker(
+        "priority": Priority.NORMAL,
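The new /backfill route schedules a low-priority refresh for every supported dataset, one upsert_job per dataset. A minimal sketch of how an operator might call it; the base URL and the Authorization header are assumptions for illustration, not taken from this diff (the route only requires a POST and authenticates through auth_check):

# Hypothetical client call for the new POST /backfill route.
# ADMIN_URL and the bearer token are assumptions, not part of the commit.
import requests

ADMIN_URL = "http://localhost:8081"  # assumption: where the admin service listens
response = requests.post(f"{ADMIN_URL}/backfill", headers={"Authorization": "Bearer <token>"})
print(response.status_code, response.json())  # on success: 200 {'status': 'ok'}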
290f5be529d8c040b5638437cf25fa2fc6da0a53
Sylvain Lesage
2023-01-26T18:46:03
fix: 🐛 fix migration script (#707)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml
index f1b01179..047546f5 100644
--- a/chart/docker-images.yaml
+++ b/chart/docker-images.yaml
@@ -5 +5 @@
-    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-3de1315"
+    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-48dee06"
diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py
index b73f135e..43291b6f 100644
--- a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py
+++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py
@@ -35 +35 @@ class MigrationAddPriorityToJob(Migration):
-            if doc.priority != "normal":
+            if doc.priority != Priority.NORMAL:
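The one-line fix matters because mongoengine's EnumField hands back a Priority member, not the raw string, so the old comparison against "normal" failed for every sampled document. A standalone sketch of the pitfall, using a plain Python enum (no database required):

from enum import Enum

class Priority(Enum):
    NORMAL = "normal"
    LOW = "low"

# what the EnumField hands back after reading a document
priority = Priority("normal")

print(priority != "normal")          # True: an Enum member never equals its raw value
print(priority != Priority.NORMAL)   # False: comparing against the member works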
ebdb9feaf09dc508267be1ae4fd0d259567a61cb
Sylvain Lesage
2023-01-26T18:12:57
Add priority field to queue (#705)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml
index e8001e77..f1b01179 100644
--- a/chart/docker-images.yaml
+++ b/chart/docker-images.yaml
@@ -5 +5 @@
-    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-da3070a"
+    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-3de1315"
@@ -8,2 +8,2 @@
-    "admin": "huggingface/datasets-server-services-admin:sha-da3070a",
-    "api": "huggingface/datasets-server-services-api:sha-da3070a"
+    "admin": "huggingface/datasets-server-services-admin:sha-3de1315",
+    "api": "huggingface/datasets-server-services-api:sha-3de1315"
@@ -12 +12 @@
-    "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-da3070a"
+    "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-3de1315"
diff --git a/jobs/mongodb_migration/src/mongodb_migration/collector.py b/jobs/mongodb_migration/src/mongodb_migration/collector.py
index 01ce2590..71cada64 100644
--- a/jobs/mongodb_migration/src/mongodb_migration/collector.py
+++ b/jobs/mongodb_migration/src/mongodb_migration/collector.py
@@ -13,0 +14,3 @@ from mongodb_migration.migrations._20221117223000_cache_generic_response import
+from mongodb_migration.migrations._20230126164900_queue_job_add_priority import (
+    MigrationAddPriorityToJob,
+)
@@ -27,0 +31,4 @@ class MigrationsCollector:
+            MigrationAddPriorityToJob(
+                version="20230126164900",
+                description="add 'priority' field to jobs in queue database",
+            ),
diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py
new file mode 100644
index 00000000..b73f135e
--- /dev/null
+++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20230126164900_queue_job_add_priority.py
@@ -0,0 +1,123 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 The HuggingFace Authors.
+
+import enum
+import logging
+import types
+from typing import Generic, Type, TypeVar
+
+from mongoengine import Document
+from mongoengine.connection import get_db
+from mongoengine.fields import BooleanField, DateTimeField, EnumField, StringField
+from mongoengine.queryset.queryset import QuerySet
+
+from mongodb_migration.check import check_documents
+from mongodb_migration.migration import Migration
+
+
+# connection already occurred in the main.py (caveat: we use globals)
+class MigrationAddPriorityToJob(Migration):
+    def up(self) -> None:
+        # See https://docs.mongoengine.org/guide/migration.html#example-1-addition-of-a-field
+        logging.info("Add the priority field, with the default value ('normal'), to all the jobs")
+        db = get_db("queue")
+        db["jobsBlue"].update_many({}, {"$set": {"priority": "normal"}})
+
+    def down(self) -> None:
+        logging.info("Remove the priority field from all the jobs")
+        db = get_db("queue")
+        db["jobsBlue"].update_many({}, {"$unset": {"priority": ""}})
+
+    def validate(self) -> None:
+        logging.info("Ensure that a random selection of jobs have the 'priority' field set to 'normal'")
+
+        def custom_validation(doc: JobSnapshot) -> None:
+            if doc.priority != "normal":
+                raise ValueError("priority should be 'normal'")
+
+        check_documents(DocCls=JobSnapshot, sample_size=10, custom_validation=custom_validation)
+        if JobSnapshot.objects(priority=Priority.NORMAL).count() != JobSnapshot.objects.count():
+            raise ValueError('All the objects should have the "priority" field, set to "normal"')
+
+
+# --- JobSnapshot ---
+# copied from libcommon.queue.Job, as a snapshot of when the migration was created
+class Status(enum.Enum):
+    WAITING = "waiting"
+    STARTED = "started"
+    SUCCESS = "success"
+    ERROR = "error"
+    CANCELLED = "cancelled"
+    SKIPPED = "skipped"
+
+
+class Priority(enum.Enum):
+    NORMAL = "normal"
+    LOW = "low"
+
+
+# START monkey patching ### hack ###
+# see https://github.com/sbdchd/mongo-types#install
+U = TypeVar("U", bound=Document)
+
+
+def no_op(self, x):  # type: ignore
+    return self
+
+
+QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet)
+
+
+class QuerySetManager(Generic[U]):
+    def __get__(self, instance: object, cls: Type[U]) -> QuerySet[U]:
+        return QuerySet(cls, cls._get_collection())
+
+
+# END monkey patching ### hack ###
+
+
+class JobSnapshot(Document):
+    """A job in the mongoDB database
+
+    Args:
+        type (`str`): The type of the job, identifies the queue
+        dataset (`str`): The dataset on which to apply the job.
+        config (`str`, optional): The config on which to apply the job.
+        split (`str`, optional): The config on which to apply the job.
+        unicity_id (`str`): A string that identifies the job uniquely. Only one job with the same unicity_id can be in
+            the started state.
+        namespace (`str`): The dataset namespace (user or organization) if any, else the dataset name (canonical name).
+        force (`bool`, optional): If True, the job SHOULD not be skipped. Defaults to False.
+        priority (`Priority`, optional): The priority of the job. Defaults to Priority.NORMAL.
+        status (`Status`, optional): The status of the job. Defaults to Status.WAITING.
+        created_at (`datetime`): The creation date of the job.
+        started_at (`datetime`, optional): When the job has started.
+        finished_at (`datetime`, optional): When the job has finished.
+    """
+
+    meta = {
+        "collection": "jobsBlue",
+        "db_alias": "queue",
+        "indexes": [
+            "status",
+            ("type", "status"),
+            ("type", "dataset", "status"),
+            ("type", "dataset", "config", "split", "status", "force", "priority"),
+            ("status", "type", "created_at", "namespace", "unicity_id", "priority"),
+            "-created_at",
+        ],
+    }
+    type = StringField(required=True)
+    dataset = StringField(required=True)
+    config = StringField()
+    split = StringField()
+    unicity_id = StringField(required=True)
+    namespace = StringField(required=True)
+    force = BooleanField(default=False)
+    priority = EnumField(Priority, default=Priority.NORMAL)
+    status = EnumField(Status, default=Status.WAITING)
+    created_at = DateTimeField(required=True)
+    started_at = DateTimeField()
+    finished_at = DateTimeField()
+
+    objects = QuerySetManager["JobSnapshot"]()
diff --git a/libs/libcommon/src/libcommon/queue.py b/libs/libcommon/src/libcommon/queue.py
index 4420bf50..5c85dbfb 100644
--- a/libs/libcommon/src/libcommon/queue.py
+++ b/libs/libcommon/src/libcommon/queue.py
@@ -3,0 +4 @@
+import contextlib
@@ -45,0 +47,5 @@ class Status(enum.Enum):
+class Priority(enum.Enum):
+    NORMAL = "normal"
+    LOW = "low"
+
+
@@ -53,0 +60 @@ class JobDict(TypedDict):
+    priority: str
@@ -112,0 +120 @@ class Job(Document):
+        priority (`Priority`, optional): The priority of the job. Defaults to Priority.NORMAL.
@@ -126,2 +134,2 @@ class Job(Document):
-            ("type", "dataset", "config", "split", "status"),
-            ("status", "type", "created_at", "namespace", "unicity_id"),
+            ("type", "dataset", "config", "split", "status", "force", "priority"),
+            ("status", "type", "created_at", "namespace", "unicity_id", "priority"),
@@ -137,0 +146 @@ class Job(Document):
+    priority = EnumField(Priority, default=Priority.NORMAL)
@@ -151,0 +161 @@ class Job(Document):
+            "priority": self.priority.value,
@@ -170,3 +180,4 @@ class Queue:
-    - a job can be in the queue only once (unicity_id) in the "started" state
-    - a job can be in the queue multiple times in the other states (waiting, success, error, cancelled, skipped)
-    - the queue is ordered by the creation date of the jobs
+    - a job can be in the queue only once (unicity_id) in the "started" or "waiting" state
+    - a job can be in the queue multiple times in the other states (success, error, cancelled, skipped)
+    - a job has a priority (two levels: NORMAL and LOW)
+    - the queue is ordered by priority then by the creation date of the jobs
@@ -191 +202,6 @@ class Queue:
-        self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False
+        self,
+        dataset: str,
+        config: Optional[str] = None,
+        split: Optional[str] = None,
+        force: bool = False,
+        priority: Priority = Priority.NORMAL,
@@ -201,0 +218 @@ class Queue:
+            priority (`Priority`, optional): The priority of the job. Defaults to Priority.NORMAL.
@@ -212,0 +230 @@ class Queue:
+            priority=priority,
@@ -218 +236,6 @@ class Queue:
-        self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False
+        self,
+        dataset: str,
+        config: Optional[str] = None,
+        split: Optional[str] = None,
+        force: bool = False,
+        priority: Priority = Priority.NORMAL,
@@ -223,0 +247,2 @@ class Queue:
+        Note that the new job inherits the force=True property if one of the previous waiting jobs had it.
+        In the same way, the new job inherits the highest priority.
@@ -229,0 +255 @@ class Queue:
+            priority (`Priority`, optional): The priority of the job. Defaults to Priority.NORMAL.
@@ -233,10 +259,13 @@ class Queue:
-        Job.objects(type=self.type, dataset=dataset, config=config, split=split, status=Status.WAITING).update(
-            finished_at=get_datetime(), status=Status.CANCELLED
-        )
-        return self._add_job(dataset=dataset, config=config, split=split, force=force)
-
-    def get_next_waiting_job(self) -> Job:
-        """Get the next job in the queue.
-
-        Get the waiting job with the oldest creation date:
-        - first, among the datasets that still have no started job.
+        existing = Job.objects(type=self.type, dataset=dataset, config=config, split=split, status=Status.WAITING)
+        if existing(force=True).count() > 0:
+            force = True
+        if existing(priority=Priority.NORMAL).count() > 0:
+            priority = Priority.NORMAL
+        existing.update(finished_at=get_datetime(), status=Status.CANCELLED)
+        return self._add_job(dataset=dataset, config=config, split=split, force=force, priority=priority)
+
+    def _get_next_waiting_job_for_priority(self, priority: Priority) -> Job:
+        """Get the next job in the queue for a given priority.
+
+        For a given priority, get the waiting job with the oldest creation date:
+        - among the datasets that still have no started job.
@@ -259,0 +289 @@
+            priority=priority,
@@ -294,0 +325 @@
+            priority=priority,
@@ -302,0 +334,23 @@
+        raise EmptyQueueError(
+            f"no job available with the priority (within the limit of {self.max_jobs_per_namespace} started jobs per"
+            " namespace)"
+        )
+
+    def get_next_waiting_job(self) -> Job:
+        """Get the next job in the queue.
+
+        Get the waiting job with the oldest creation date with the following criteria:
+        - among the highest priority jobs,
+        - among the datasets that still have no started job.
+        - if none, among the datasets that have the least started jobs:
+          - in the limit of `max_jobs_per_namespace` jobs per namespace
+          - ensuring that the unicity_id field is unique among the started jobs.
+
+        Raises:
+            EmptyQueueError: if there is no waiting job in the queue that satisfies the restrictions above.
+
+        Returns: the job
+        """
+        for priority in [Priority.NORMAL, Priority.LOW]:
+            with contextlib.suppress(EmptyQueueError):
+                return self._get_next_waiting_job_for_priority(priority)
diff --git a/libs/libcommon/tests/test_queue.py b/libs/libcommon/tests/test_queue.py
index 67b72199..b3ab02d2 100644
--- a/libs/libcommon/tests/test_queue.py
+++ b/libs/libcommon/tests/test_queue.py
@@ -10 +10,7 @@ from libcommon.config import QueueConfig
-from libcommon.queue import EmptyQueueError, Queue, Status, _clean_queue_database
+from libcommon.queue import (
+    EmptyQueueError,
+    Priority,
+    Queue,
+    Status,
+    _clean_queue_database,
+)
@@ -81 +87 @@ def test_upsert_job() -> None:
-    assert job_info["force"] is False
+    assert job_info["force"] is True  # the new job inherits from waiting forced jobs
@@ -84 +90 @@ def test_upsert_job() -> None:
-    queue.upsert_job(dataset=test_dataset, force=True)
+    queue.upsert_job(dataset=test_dataset, force=False)
@@ -94 +100 @@ def test_upsert_job() -> None:
-    assert job_info["force"] is True
+    assert job_info["force"] is False  # the new jobs does not inherit from started forced jobs
@@ -109 +115 @@ def check_job(queue: Queue, expected_dataset: str, expected_split: str) -> None:
-def test_priority_to_non_started_datasets() -> None:
+def test_priority_logic() -> None:
@@ -115 +121,2 @@ def test_priority_to_non_started_datasets() -> None:
-    queue.upsert_job(dataset="dataset2", config="config", split="split1")
+    queue.upsert_job(dataset="dataset2", config="config", split="split1", priority=Priority.LOW)
+    queue.upsert_job(dataset="dataset2/dataset", config="config", split="split1", priority=Priority.LOW)
@@ -117,0 +125 @@ def test_priority_to_non_started_datasets() -> None:
+    queue.upsert_job(dataset="dataset3", config="config", split="split1", priority=Priority.LOW)
@@ -118,0 +127 @@ def test_priority_to_non_started_datasets() -> None:
+    queue.upsert_job(dataset="dataset2", config="config", split="split1", priority=Priority.LOW)
@@ -120 +129 @@ def test_priority_to_non_started_datasets() -> None:
-    check_job(queue=queue, expected_dataset="dataset2", expected_split="split1")
+    check_job(queue=queue, expected_dataset="dataset2", expected_split="split2")
@@ -121,0 +131 @@ def test_priority_to_non_started_datasets() -> None:
+    # ^ before the other "dataset3" jobs because its priority is higher (it inherited Priority.NORMAL in upsert_job)
@@ -123 +133 @@ def test_priority_to_non_started_datasets() -> None:
-    check_job(queue=queue, expected_dataset="dataset2", expected_split="split2")
+    # ^ same namespace as dataset1/dataset, goes after namespaces without any started job
@@ -124,0 +135,5 @@ def test_priority_to_non_started_datasets() -> None:
+    # ^ comes after the other "dataset1" jobs because the last upsert_job call moved its creation date
+    check_job(queue=queue, expected_dataset="dataset2/dataset", expected_split="split1")
+    # ^ comes after the other "dataset2" jobs because its priority is lower
+    check_job(queue=queue, expected_dataset="dataset2", expected_split="split1")
+    # ^ the rest of the rules apply for Priority.LOW jobs
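The new get_next_waiting_job walks the priority levels in order and only falls through to LOW when no NORMAL job qualifies, swallowing EmptyQueueError between levels with contextlib.suppress. A self-contained sketch of that two-level pattern, using in-memory lists in place of the mongoengine querysets:

import contextlib
from enum import Enum

class Priority(Enum):
    NORMAL = "normal"
    LOW = "low"

class EmptyQueueError(Exception):
    pass

# in-memory stand-in for the waiting jobs, oldest first per priority level
waiting: dict[Priority, list[str]] = {Priority.NORMAL: [], Priority.LOW: ["low-job"]}

def get_next_waiting_job_for_priority(priority: Priority) -> str:
    if not waiting[priority]:
        raise EmptyQueueError(f"no {priority.value} job available")
    return waiting[priority].pop(0)

def get_next_waiting_job() -> str:
    # same shape as Queue.get_next_waiting_job: try NORMAL first, then LOW
    for priority in [Priority.NORMAL, Priority.LOW]:
        with contextlib.suppress(EmptyQueueError):
            return get_next_waiting_job_for_priority(priority)
    raise EmptyQueueError("no job available")

print(get_next_waiting_job())  # "low-job": LOW is only served once NORMAL is empty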
b69c0f0dcb7fa005b9ee1e2eea3bb1dc6c452956
Sylvain Lesage
2023-01-26T17:52:21
feat: 🎸 make /first-rows depend on /split-names, not /splits (#706)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml
index c1ec7c1d..e8001e77 100644
--- a/chart/docker-images.yaml
+++ b/chart/docker-images.yaml
@@ -5 +5 @@
-    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-690d1cd"
+    "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-da3070a"
@@ -8,2 +8,2 @@
-    "admin": "huggingface/datasets-server-services-admin:sha-4dc0383",
-    "api": "huggingface/datasets-server-services-api:sha-4dc0383"
+    "admin": "huggingface/datasets-server-services-admin:sha-da3070a",
+    "api": "huggingface/datasets-server-services-api:sha-da3070a"
@@ -12 +12 @@
-    "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-690d1cd"
+    "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-da3070a"
diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py
index ce3d2e46..68be3939 100644
--- a/libs/libcommon/src/libcommon/config.py
+++ b/libs/libcommon/src/libcommon/config.py
@@ -149 +149 @@ class ProcessingGraphConfig:
-                "/first-rows": {"input_type": "split", "requires": "/splits", "required_by_dataset_viewer": True},
+                "/first-rows": {"input_type": "split", "requires": "/split-names", "required_by_dataset_viewer": True},
diff --git a/libs/libcommon/tests/test_processing_steps.py b/libs/libcommon/tests/test_processing_steps.py
index ceef62a5..19b61ebe 100644
--- a/libs/libcommon/tests/test_processing_steps.py
+++ b/libs/libcommon/tests/test_processing_steps.py
@@ -27 +27 @@ def test_default_graph():
-    assert split_names.children == []
+    assert split_names.children == [first_rows]
@@ -32 +32 @@ def test_default_graph():
-    assert splits.children == [first_rows]
+    assert splits.children == []
@@ -36 +36 @@ def test_default_graph():
-    assert first_rows.parent is splits
+    assert first_rows.parent is split_names
@@ -38 +38 @@ def test_default_graph():
-    assert first_rows.get_ancestors() == [splits]
+    assert first_rows.get_ancestors() == [config_names, split_names]
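The change itself is a single edit to the "requires" key in the processing graph specification. A minimal sketch (a simplified dict, not the real libcommon ProcessingGraph class) of how such a mapping yields the parent/child links asserted in the test above:

# Minimal sketch: derive parent/child links from a "requires" mapping like the
# one edited in this commit. Simplified; the real class also tracks ancestors.
specification = {
    "/config-names": {"input_type": "dataset"},
    "/split-names": {"input_type": "config", "requires": "/config-names"},
    "/splits": {"input_type": "dataset"},  # to be deprecated
    "/first-rows": {"input_type": "split", "requires": "/split-names"},
}

children: dict[str, list[str]] = {step: [] for step in specification}
for step, spec in specification.items():
    parent = spec.get("requires")
    if parent is not None:
        children[parent].append(step)

print(children["/split-names"])  # ['/first-rows']: the new parent of /first-rows
print(children["/splits"])       # []: /splits no longer has children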
fd106cbaed2ba500af91e3bdd19925fe44f2b15e
Kevin Leffew
2023-01-26T16:19:20
Update index.mdx (#693)
diff --git a/docs/source/index.mdx b/docs/source/index.mdx
index abf155b2..241f2e86 100644
--- a/docs/source/index.mdx
+++ b/docs/source/index.mdx
@@ -11 +11 @@ Let Datasets Server take care of the heavy lifting so you can:
-Join the growing community on the [forum](https://discuss.huggingface.co/) or [Discord](https://discord.com/invite/JfAtkvEtRb) today, and give the [Datasets Server repository](https://huggingface.co/docs/datasets-server/index) a ⭐️ if you're interested in the latest updates!
+Join the growing community on the [forum](https://discuss.huggingface.co/) or [Discord](https://discord.com/invite/JfAtkvEtRb) today, and give the [Datasets Server repository](https://github.com/huggingface/datasets-server) a ⭐️ if you're interested in the latest updates!
7d4e4d613b32ebe7335e5f5c76a6cdb258e5fc5e
Sylvain Lesage
2023-01-26T14:14:32
Configs and splits (#702)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index cecf827b..c1ec7c1d 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-5364f81" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-690d1cd" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-5364f81", - "api": "huggingface/datasets-server-services-api:sha-5364f81" + "admin": "huggingface/datasets-server-services-admin:sha-4dc0383", + "api": "huggingface/datasets-server-services-api:sha-4dc0383" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-5364f81" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-690d1cd" diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index c71fdca5..1f0d6244 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -88,0 +89,16 @@ api: +configNames: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 + +splitsNames: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 + diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 4a8c5b93..bdec9ba3 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -176,0 +177,34 @@ api: +configNames: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 3 + + nodeSelector: + role-datasets-server: "true" + replicas: 8 + resources: + requests: + cpu: 1 + memory: "1Gi" + limits: + cpu: 2 + memory: "30Gi" + +splitNames: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 5 + + nodeSelector: + role-datasets-server: "true" + replicas: 12 + resources: + requests: + cpu: 1 + memory: "8Gi" + limits: + cpu: 2 + memory: "30Gi" + diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index c7778403..7c592a14 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -72,0 +73,10 @@ app: "{{ include "release" . }}-api" +{{- define "labels.configNames" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-worker-config-names" +{{- end -}} + +{{- define "labels.splitNames" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-worker-split-names" +{{- end -}} + diff --git a/chart/templates/worker/config-names/_container.tpl b/chart/templates/worker/config-names/_container.tpl new file mode 100644 index 00000000..31b3951c --- /dev/null +++ b/chart/templates/worker/config-names/_container.tpl @@ -0,0 +1,28 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerWorkerConfigNames" -}} +- name: "{{ include "name" . }}-worker-config-names" + image: {{ .Values.dockerImage.workers.datasets_based }} + imagePullPolicy: {{ .Values.docker.pullPolicy }} + env: + - name: DATASETS_BASED_ENDPOINT + value: "/config-names" + # ^ hard-coded + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} + {{ include "envWorkerLoop" . | nindent 2 }} + {{ include "envDatasetsBased" . 
| nindent 2 }} + - name: DATASETS_BASED_HF_DATASETS_CACHE + value: {{ printf "%s/config-names/datasets" .Values.cacheDirectory | quote }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} + # overridden + value: {{ .Values.configNames.queue.maxJobsPerNamespace | quote }} + volumeMounts: + {{ include "volumeMountCache" . | nindent 2 }} + securityContext: + allowPrivilegeEscalation: false + resources: {{ toYaml .Values.configNames.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/worker/config-names/deployment.yaml b/chart/templates/worker/config-names/deployment.yaml new file mode 100644 index 00000000..4b8de337 --- /dev/null +++ b/chart/templates/worker/config-names/deployment.yaml @@ -0,0 +1,29 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: {{ include "labels.configNames" . | nindent 4 }} + name: "{{ include "release" . }}-worker-config-names" + namespace: {{ .Release.Namespace }} +spec: + progressDeadlineSeconds: 600 + replicas: {{ .Values.configNames.replicas }} + revisionHistoryLimit: 10 + selector: + matchLabels: {{ include "labels.configNames" . | nindent 6 }} + strategy: + type: Recreate + template: + metadata: + labels: {{ include "labels.configNames" . | nindent 8 }} + spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: + {{ include "initContainerCache" . | nindent 8 }} + containers: {{ include "containerWorkerConfigNames" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.configNames.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.configNames.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/templates/worker/split-names/_container.tpl b/chart/templates/worker/split-names/_container.tpl new file mode 100644 index 00000000..f44d5666 --- /dev/null +++ b/chart/templates/worker/split-names/_container.tpl @@ -0,0 +1,28 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerWorkerSplitNames" -}} +- name: "{{ include "name" . }}-worker-split-names" + image: {{ .Values.dockerImage.workers.datasets_based }} + imagePullPolicy: {{ .Values.docker.pullPolicy }} + env: + - name: DATASETS_BASED_ENDPOINT + value: "/split-names" + # ^ hard-coded + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} + {{ include "envWorkerLoop" . | nindent 2 }} + {{ include "envDatasetsBased" . | nindent 2 }} + - name: DATASETS_BASED_HF_DATASETS_CACHE + value: {{ printf "%s/split-names/datasets" .Values.cacheDirectory | quote }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} + # overridden + value: {{ .Values.splitNames.queue.maxJobsPerNamespace | quote }} + volumeMounts: + {{ include "volumeMountCache" . | nindent 2 }} + securityContext: + allowPrivilegeEscalation: false + resources: {{ toYaml .Values.splitNames.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/worker/split-names/deployment.yaml b/chart/templates/worker/split-names/deployment.yaml new file mode 100644 index 00000000..baee269c --- /dev/null +++ b/chart/templates/worker/split-names/deployment.yaml @@ -0,0 +1,29 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: {{ include "labels.splitNames" . | nindent 4 }} + name: "{{ include "release" . }}-worker-split-names" + namespace: {{ .Release.Namespace }} +spec: + progressDeadlineSeconds: 600 + replicas: {{ .Values.splitNames.replicas }} + revisionHistoryLimit: 10 + selector: + matchLabels: {{ include "labels.splitNames" . | nindent 6 }} + strategy: + type: Recreate + template: + metadata: + labels: {{ include "labels.splitNames" . | nindent 8 }} + spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: + {{ include "initContainerCache" . | nindent 8 }} + containers: {{ include "containerWorkerSplitNames" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.splitNames.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.splitNames.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/values.yaml b/chart/values.yaml index 9c6e2673..0f1f70e8 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -212,0 +213,30 @@ api: +configNames: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 + + nodeSelector: {} + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 + tolerations: [] + +splitNames: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 + + nodeSelector: {} + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 + tolerations: [] + diff --git a/e2e/tests/test_11_auth.py b/e2e/tests/test_11_auth.py index b8684ef9..aff84ea2 100644 --- a/e2e/tests/test_11_auth.py +++ b/e2e/tests/test_11_auth.py @@ -41,0 +42,2 @@ def test_auth_e2e( + f"/config-names?dataset={dataset}", + f"/split-names?dataset={dataset}&config={config}", diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py index 8c87617e..ce3d2e46 100644 --- a/libs/libcommon/src/libcommon/config.py +++ b/libs/libcommon/src/libcommon/config.py @@ -146 +146,3 @@ class ProcessingGraphConfig: - "/splits": {"input_type": "dataset", "required_by_dataset_viewer": True}, + "/config-names": {"input_type": "dataset"}, + "/split-names": {"input_type": "config", "requires": "/config-names"}, + "/splits": {"input_type": "dataset", "required_by_dataset_viewer": True}, # to be deprecated diff --git a/libs/libcommon/src/libcommon/processing_graph.py b/libs/libcommon/src/libcommon/processing_graph.py index 09090184..80ffd6cf 100644 --- a/libs/libcommon/src/libcommon/processing_graph.py +++ b/libs/libcommon/src/libcommon/processing_graph.py @@ -9 +9 @@ from typing import List, Literal, Mapping, Optional, TypedDict -InputType = Literal["dataset", "split"] +InputType = Literal["dataset", "config", "split"] diff --git a/libs/libcommon/tests/test_processing_steps.py b/libs/libcommon/tests/test_processing_steps.py index d101f080..ceef62a5 100644 --- a/libs/libcommon/tests/test_processing_steps.py +++ b/libs/libcommon/tests/test_processing_steps.py @@ -10,0 +11,2 @@ def test_default_graph(): + config_names = graph.get_step("/config-names") + split_names = graph.get_step("/split-names") @@ -18,6 +20,4 @@ def test_default_graph(): - assert splits is not None - assert first_rows is not None - assert parquet_and_dataset_info is not None - assert parquet is not None - assert dataset_info is not None - assert sizes 
is not None + assert config_names is not None + assert config_names.parent is None + assert config_names.children == [split_names] + assert config_names.get_ancestors() == [] @@ -25,6 +25,4 @@ def test_default_graph(): - assert splits.parent is None - assert first_rows.parent is splits - assert parquet_and_dataset_info.parent is None - assert parquet.parent is parquet_and_dataset_info - assert dataset_info.parent is parquet_and_dataset_info - assert sizes.parent is parquet_and_dataset_info + assert split_names is not None + assert split_names.parent is config_names + assert split_names.children == [] + assert split_names.get_ancestors() == [config_names] @@ -31,0 +30,2 @@ def test_default_graph(): + assert splits is not None + assert splits.parent is None @@ -33,6 +32,0 @@ def test_default_graph(): - assert first_rows.children == [] - assert parquet_and_dataset_info.children == [parquet, dataset_info, sizes] - assert parquet.children == [] - assert dataset_info.children == [] - assert sizes.children == [] - @@ -39,0 +34,4 @@ def test_default_graph(): + + assert first_rows is not None + assert first_rows.parent is splits + assert first_rows.children == [] @@ -40,0 +39,4 @@ def test_default_graph(): + + assert parquet_and_dataset_info is not None + assert parquet_and_dataset_info.parent is None + assert parquet_and_dataset_info.children == [parquet, dataset_info, sizes] @@ -41,0 +44,4 @@ def test_default_graph(): + + assert parquet is not None + assert parquet.parent is parquet_and_dataset_info + assert parquet.children == [] @@ -42,0 +49,4 @@ def test_default_graph(): + + assert dataset_info is not None + assert dataset_info.parent is parquet_and_dataset_info + assert dataset_info.children == [] @@ -43,0 +54,4 @@ def test_default_graph(): + + assert sizes is not None + assert sizes.parent is parquet_and_dataset_info + assert sizes.children == [] @@ -46 +60 @@ def test_default_graph(): - assert graph.get_first_steps() == [splits, parquet_and_dataset_info] + assert graph.get_first_steps() == [config_names, splits, parquet_and_dataset_info] diff --git a/services/admin/src/admin/routes/force_refresh.py b/services/admin/src/admin/routes/force_refresh.py index 94d37939..45ea1279 100644 --- a/services/admin/src/admin/routes/force_refresh.py +++ b/services/admin/src/admin/routes/force_refresh.py @@ -39,0 +40,5 @@ def create_force_refresh_endpoint( + elif processing_step.input_type == "config": + config = request.query_params.get("config") + split = None + if not are_valid_parameters([config]): + raise MissingRequiredParameterError("Parameter 'config' is required") diff --git a/services/api/src/api/routes/processing_step.py b/services/api/src/api/routes/processing_step.py index 20f4ef2e..0875a58e 100644 --- a/services/api/src/api/routes/processing_step.py +++ b/services/api/src/api/routes/processing_step.py @@ -46,0 +47,5 @@ def create_processing_step_endpoint( + elif processing_step.input_type == "config": + config = request.query_params.get("config") + split = None + if not are_valid_parameters([config]): + raise MissingRequiredParameterError("Parameter 'config' is required") diff --git a/services/api/tests/conftest.py b/services/api/tests/conftest.py index 708ef6c6..63893ddc 100644 --- a/services/api/tests/conftest.py +++ b/services/api/tests/conftest.py @@ -56,0 +57,5 @@ def first_dataset_processing_step(app_config: AppConfig): +@fixture(scope="session") +def first_config_processing_step(app_config: AppConfig): + return next(step for step in app_config.processing_graph.graph.steps.values() if 
step.input_type == "config") + + diff --git a/services/api/tests/routes/test_valid.py b/services/api/tests/routes/test_valid.py index 340ef913..07e7361b 100644 --- a/services/api/tests/routes/test_valid.py +++ b/services/api/tests/routes/test_valid.py @@ -19,0 +20,9 @@ dataset_step = ProcessingStep( +config_step = ProcessingStep( + endpoint="/config-step", + input_type="config", + requires=None, + required_by_dataset_viewer=False, + parent=None, + ancestors=[], + children=[], +) @@ -71,0 +81 @@ def test_one_step( + ([config_step], True, ["dataset"]), @@ -73 +83 @@ def test_one_step( - ([dataset_step, split_step], True, ["dataset"]), + ([dataset_step, config_step, split_step], True, ["dataset"]), @@ -76 +86 @@ def test_one_step( -def test_two_steps( +def test_three_steps( @@ -82,0 +93 @@ def test_two_steps( + upsert_response(kind=config_step.cache_kind, dataset=dataset, config=config, content={}, http_status=HTTPStatus.OK) diff --git a/services/api/tests/test_app.py b/services/api/tests/test_app.py index 4267df8c..0ff1f0c1 100644 --- a/services/api/tests/test_app.py +++ b/services/api/tests/test_app.py @@ -103,0 +104,20 @@ def test_get_step(client: TestClient, first_dataset_processing_step: ProcessingS [email protected]( + "dataset,config", + [ + (None, None), + ("a", None), + ("a", ""), + ], +) +def test_get_config_missing_parameter( + client: TestClient, + dataset: Optional[str], + config: Optional[str], + first_config_processing_step: ProcessingStep, +) -> None: + response = client.get( + first_config_processing_step.endpoint, params={"dataset": dataset, "config": config, "split": None} + ) + assert response.status_code == 422 + + @@ -113 +133 @@ def test_get_step(client: TestClient, first_dataset_processing_step: ProcessingS -def test_get_first_rows_missing_parameter( +def test_get_split_missing_parameter( diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index 604c441b..4d7b5b28 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -71,0 +72,34 @@ services: + worker-config-names: + # build: + # context: .. + # dockerfile: workers/datasets_based/Dockerfile + image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided} + volumes: + - splits-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - splits-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw + - splits-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw + extends: + file: docker-compose-base.yml + service: datasets-worker + environment: + DATASETS_BASED_ENDPOINT: "/config-names" # hard-coded + depends_on: + - mongodb + restart: always + worker-split-names: + # build: + # context: .. 
+ # dockerfile: workers/datasets_based/Dockerfile + image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided} + volumes: + - splits-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - splits-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw + - splits-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw + extends: + file: docker-compose-base.yml + service: datasets-worker + environment: + DATASETS_BASED_ENDPOINT: "/split-names" # hard-coded + depends_on: + - mongodb + restart: always diff --git a/workers/datasets_based/src/datasets_based/config.py b/workers/datasets_based/src/datasets_based/config.py index 21151bca..3c2e8844 100644 --- a/workers/datasets_based/src/datasets_based/config.py +++ b/workers/datasets_based/src/datasets_based/config.py @@ -20 +20 @@ from libcommon.config import ( -DATASETS_BASED_ENDPOINT = "/splits" +DATASETS_BASED_ENDPOINT = "/config-names" diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py index 16ebe1d4..68ce0078 100644 --- a/workers/datasets_based/src/datasets_based/worker.py +++ b/workers/datasets_based/src/datasets_based/worker.py @@ -331,4 +331,5 @@ class Worker(ABC): - dataset_children = [c for c in self.processing_step.children if c.input_type == "dataset"] - for processing_step in dataset_children: - Queue(type=processing_step.job_type).upsert_job( - dataset=self.dataset, config=None, split=None, force=self.force + if len(self.processing_step.children) <= 0: + return + try: + response_in_cache = get_response( + kind=self.processing_step.cache_kind, dataset=self.dataset, config=self.config, split=self.split @@ -336,30 +337,32 @@ class Worker(ABC): - - split_children = [c for c in self.processing_step.children if c.input_type == "split"] - if len(split_children) > 0: - try: - response_in_cache = get_response( - kind=self.processing_step.cache_kind, dataset=self.dataset, config=self.config, split=self.split - ) - except Exception: - # if the response is not in the cache, we don't create the children jobs - return - if response_in_cache["http_status"] != HTTPStatus.OK: - # if the response is not valid, we don't create the children jobs - return - new_split_full_names: set[SplitFullName] = self.get_new_splits(response_in_cache["content"]) - for processing_step in split_children: - # remove obsolete responses from the cache - split_full_names_in_cache = get_split_full_names_for_dataset_and_kind( - dataset=self.dataset, kind=processing_step.cache_kind - ) - split_full_names_to_delete = split_full_names_in_cache.difference(new_split_full_names) - for split_full_name in split_full_names_to_delete: - delete_response( - kind=processing_step.cache_kind, - dataset=split_full_name.dataset, - config=split_full_name.config, - split=split_full_name.split, - ) - logging.debug( - f"{len(split_full_names_to_delete)} {processing_step.endpoint} responses deleted from the cache" - f" for obsolete splits of dataset={self.dataset}" + except Exception: + # if the response is not in the cache, we don't create the children jobs + return + if response_in_cache["http_status"] != HTTPStatus.OK: + # if the response is not valid, we don't create the children jobs + return + new_split_full_names_for_split: set[SplitFullName] = self.get_new_splits(response_in_cache["content"]) + new_split_full_names_for_config: set[SplitFullName] = { + SplitFullName(dataset=s.dataset, config=s.config, split=None) for s in new_split_full_names_for_split + } + new_split_full_names_for_dataset: 
set[SplitFullName] = { + SplitFullName(dataset=s.dataset, config=None, split=None) for s in new_split_full_names_for_config + } # should be self.dataset + for processing_step in self.processing_step.children: + new_split_full_names = ( + new_split_full_names_for_split + if processing_step.input_type == "split" + else new_split_full_names_for_config + if processing_step.input_type == "config" + else new_split_full_names_for_dataset + ) + # remove obsolete responses from the cache + split_full_names_in_cache = get_split_full_names_for_dataset_and_kind( + dataset=self.dataset, kind=processing_step.cache_kind + ) + split_full_names_to_delete = split_full_names_in_cache.difference(new_split_full_names) + for split_full_name in split_full_names_to_delete: + delete_response( + kind=processing_step.cache_kind, + dataset=split_full_name.dataset, + config=split_full_name.config, + split=split_full_name.split, @@ -367,12 +370,12 @@ class Worker(ABC): - # compute the responses for the new splits - for split_full_name in new_split_full_names: - # we force the refresh of the children step responses if the current step refresh was forced - Queue(type=processing_step.job_type).upsert_job( - dataset=split_full_name.dataset, - config=split_full_name.config, - split=split_full_name.split, - force=self.force, - ) - logging.debug( - f"{len(new_split_full_names)} {processing_step.job_type} jobs added for the splits of" - f" dataset={self.dataset}" + logging.debug( + f"{len(split_full_names_to_delete)} obsolete responses" + f" of kind {processing_step.cache_kind} deleted from cache for dataset={self.dataset}" + ) + # compute the responses for the new splits + for split_full_name in new_split_full_names: + # we force the refresh of the children step responses if the current step refresh was forced + Queue(type=processing_step.job_type).upsert_job( + dataset=split_full_name.dataset, + config=split_full_name.config, + split=split_full_name.split, + force=self.force, @@ -379,0 +383,4 @@ class Worker(ABC): + logging.debug( + f"{len(new_split_full_names)} jobs" + f" of type {processing_step.job_type} added to queue for dataset={self.dataset}" + ) diff --git a/workers/datasets_based/src/datasets_based/worker_factory.py b/workers/datasets_based/src/datasets_based/worker_factory.py index c27e220a..ca505b9c 100644 --- a/workers/datasets_based/src/datasets_based/worker_factory.py +++ b/workers/datasets_based/src/datasets_based/worker_factory.py @@ -9,0 +10 @@ from datasets_based.worker import JobInfo, Worker, WorkerFactory +from datasets_based.workers.config_names import ConfigNamesWorker @@ -14,0 +16 @@ from datasets_based.workers.sizes import SizesWorker +from datasets_based.workers.split_names import SplitNamesWorker @@ -23,0 +26,4 @@ class DatasetBasedWorkerFactory(WorkerFactory): + if job_type == ConfigNamesWorker.get_job_type(): + return ConfigNamesWorker(job_info=job_info, app_config=self.app_config) + if job_type == SplitNamesWorker.get_job_type(): + return SplitNamesWorker(job_info=job_info, app_config=self.app_config) @@ -26 +32 @@ class DatasetBasedWorkerFactory(WorkerFactory): - elif job_type == FirstRowsWorker.get_job_type(): + if job_type == FirstRowsWorker.get_job_type(): @@ -30 +36 @@ class DatasetBasedWorkerFactory(WorkerFactory): - elif job_type == ParquetAndDatasetInfoWorker.get_job_type(): + if job_type == ParquetAndDatasetInfoWorker.get_job_type(): @@ -36 +42 @@ class DatasetBasedWorkerFactory(WorkerFactory): - elif job_type == ParquetWorker.get_job_type(): 
@@ -38 +44 @@ class DatasetBasedWorkerFactory(WorkerFactory): - elif job_type == DatasetInfoWorker.get_job_type(): + if job_type == DatasetInfoWorker.get_job_type(): @@ -40 +46 @@ class DatasetBasedWorkerFactory(WorkerFactory): - elif job_type == SizesWorker.get_job_type(): + if job_type == SizesWorker.get_job_type(): @@ -42,10 +48,11 @@ class DatasetBasedWorkerFactory(WorkerFactory): - else: - supported_job_types = [ - SplitsWorker.get_job_type(), - FirstRowsWorker.get_job_type(), - ParquetAndDatasetInfoWorker.get_job_type(), - ParquetWorker.get_job_type(), - DatasetInfoWorker.get_job_type(), - SizesWorker.get_job_type(), - ] - raise ValueError(f"Unsupported job type: '{job_type}'. The supported job types are: {supported_job_types}") + supported_job_types = [ + ConfigNamesWorker.get_job_type(), + SplitNamesWorker.get_job_type(), + SplitsWorker.get_job_type(), + FirstRowsWorker.get_job_type(), + ParquetAndDatasetInfoWorker.get_job_type(), + ParquetWorker.get_job_type(), + DatasetInfoWorker.get_job_type(), + SizesWorker.get_job_type(), + ] + raise ValueError(f"Unsupported job type: '{job_type}'. The supported job types are: {supported_job_types}") diff --git a/workers/datasets_based/src/datasets_based/workers/config_names.py b/workers/datasets_based/src/datasets_based/workers/config_names.py new file mode 100644 index 00000000..011c8475 --- /dev/null +++ b/workers/datasets_based/src/datasets_based/workers/config_names.py @@ -0,0 +1,112 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +from http import HTTPStatus +from typing import Any, List, Literal, Mapping, Optional, TypedDict, Union + +from datasets import get_dataset_config_names +from datasets.data_files import EmptyDatasetError as _EmptyDatasetError +from libcommon.exceptions import CustomError +from libcommon.simple_cache import SplitFullName + +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker + +ConfigNamesWorkerErrorCode = Literal["EmptyDatasetError", "ConfigNamesError"] + + +class ConfigNamesWorkerError(CustomError): + """Base class for worker exceptions.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: ConfigNamesWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__( + message=message, status_code=status_code, code=str(code), cause=cause, disclose_cause=disclose_cause + ) + + +class EmptyDatasetError(ConfigNamesWorkerError): + """Raised when the dataset has no data.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) + + +class ConfigNamesError(ConfigNamesWorkerError): + """Raised when the config names could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "ConfigNamesError", cause, True) + + +class ConfigNameItem(TypedDict): + dataset: str + config: str + + +class ConfigNamesResponseContent(TypedDict): + config_names: List[ConfigNameItem] + + +def compute_config_names_response( + dataset: str, + hf_token: Optional[str] = None, +) -> ConfigNamesResponseContent: + """ + Get the response of /config-names for one specific dataset on huggingface.co. + Dataset can be private or gated if you pass an acceptable token. + + It is assumed that the dataset exists and can be accessed using the token. 
+ + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `ConfigNamesResponseContent`: An object with the list of config names. + <Tip> + Raises the following errors: + - [`~workers.config_names.EmptyDatasetError`] + The dataset is empty. + - [`~workers.config_names.ConfigNamesError`] + If the list of configs could not be obtained using the datasets library. + </Tip> + """ + logging.info(f"get config names for dataset={dataset}") + use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False + # get the list of config names + try: + config_name_items: List[ConfigNameItem] = [ + {"dataset": dataset, "config": str(config)} + for config in sorted(get_dataset_config_names(path=dataset, use_auth_token=use_auth_token)) + ] + except _EmptyDatasetError as err: + raise EmptyDatasetError("The dataset is empty.", cause=err) from err + except Exception as err: + raise ConfigNamesError("Cannot get the config names for the dataset.", cause=err) from err + return {"config_names": config_name_items} + + +class ConfigNamesWorker(DatasetsBasedWorker): + @staticmethod + def get_job_type() -> str: + return "/config-names" + + @staticmethod + def get_version() -> str: + return "1.0.0" + + def compute(self) -> Mapping[str, Any]: + return compute_config_names_response(dataset=self.dataset, hf_token=self.common_config.hf_token) + + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + """Get the set of new splits, from the content created by the compute.""" + return {SplitFullName(dataset=s["dataset"], config=s["config"], split=None) for s in content["config_names"]} diff --git a/workers/datasets_based/src/datasets_based/workers/dataset_info.py b/workers/datasets_based/src/datasets_based/workers/dataset_info.py index e1b24210..62555120 100644 --- a/workers/datasets_based/src/datasets_based/workers/dataset_info.py +++ b/workers/datasets_based/src/datasets_based/workers/dataset_info.py @@ -64 +64 @@ def compute_dataset_info_response(dataset: str) -> DatasetInfoResponse: - - [`~dataset_info.worker.PreviousStepStatusError`] + - [`~workers.dataset_info.PreviousStepStatusError`] @@ -66 +66 @@ def compute_dataset_info_response(dataset: str) -> DatasetInfoResponse: - - [`~dataset_info.worker.PreviousStepFormatError`] + - [`~workers.dataset_info.PreviousStepFormatError`] diff --git a/workers/datasets_based/src/datasets_based/workers/first_rows.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py index 752ee9c8..fc00b636 100644 --- a/workers/datasets_based/src/datasets_based/workers/first_rows.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -416 +416 @@ def compute_first_rows_response( - - [`~libcommon.worker.ConfigNotFoundError`] + - [`libcommon.worker.ConfigNotFoundError`] @@ -418 +418 @@ def compute_first_rows_response( - - [`~libcommon.worker.SplitNotFoundError`] + - [`libcommon.worker.SplitNotFoundError`] @@ -420 +420 @@ def compute_first_rows_response( - - [`~worker.utils.InfoError`] + - [`~workers.first_rows.InfoError`] @@ -422 +422 @@ def compute_first_rows_response( - - [`~worker.utils.FeaturesError`] + - [`~workers.first_rows.FeaturesError`] @@ -424 +424 @@ def compute_first_rows_response( - - [`~worker.utils.StreamingRowsError`] + - [`~workers.first_rows.StreamingRowsError`] @@ -426 +426 @@ def compute_first_rows_response( - - 
[`~worker.utils.NormalRowsError`] + - [`~workers.first_rows.NormalRowsError`] @@ -428 +428 @@ def compute_first_rows_response( - - [`~worker.utils.RowsPostProcessingError`] + - [`~workers.first_rows.RowsPostProcessingError`] diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index 26475bde..9c66ebe8 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -65 +65 @@ def compute_parquet_response(dataset: str) -> ParquetResponse: - - [`~parquet.worker.PreviousStepStatusError`] + - [`~workers.parquet.PreviousStepStatusError`] @@ -67 +67 @@ def compute_parquet_response(dataset: str) -> ParquetResponse: - - [`~parquet.worker.PreviousStepFormatError`] + - [`~workers.parquet.PreviousStepFormatError`] diff --git a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py index a2f9a8b5..18fb3126 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py @@ -521 +521 @@ def compute_parquet_and_dataset_info_response( - - [`~parquet_and_dataset_info.worker.DatasetInBlockListError`] + - [`~workers.parquet_and_dataset_info.DatasetInBlockListError`] @@ -523 +523 @@ def compute_parquet_and_dataset_info_response( - - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + - [`libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. @@ -526,2 +526,2 @@ def compute_parquet_and_dataset_info_response( - - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. - - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + - [`libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. 
+ - [`libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the @@ -530,2 +530,3 @@ def compute_parquet_and_dataset_info_response( - - ['~requests.exceptions.HTTPError']: any other error when asking access - - [`~parquet_and_dataset_info.worker.DatasetRevisionNotFoundError`] + - ['HTTPError'](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): any other error when + asking access + - [`~workers.parquet_and_dataset_info.DatasetRevisionNotFoundError`] @@ -533 +534 @@ def compute_parquet_and_dataset_info_response( - - [`~parquet_and_dataset_info.worker.DatasetTooBigFromHubError`] + - [`~workers.parquet_and_dataset_info.DatasetTooBigFromHubError`] @@ -535 +536 @@ def compute_parquet_and_dataset_info_response( - - [`ValueError`] + - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) @@ -537 +538 @@ def compute_parquet_and_dataset_info_response( - - [`~parquet_and_dataset_info.worker.DatasetTooBigFromDatasetsError`] + - [`~workers.parquet_and_dataset_info.DatasetTooBigFromDatasetsError`] @@ -539 +540 @@ def compute_parquet_and_dataset_info_response( - - [`~parquet_and_dataset_info.worker.EmptyDatasetError`] + - [`~workers.parquet_and_dataset_info.EmptyDatasetError`] @@ -541 +542 @@ def compute_parquet_and_dataset_info_response( - - [`~parquet_and_dataset_info.worker.ConfigNamesError`] + - [`~workers.parquet_and_dataset_info.ConfigNamesError`] @@ -543 +544 @@ def compute_parquet_and_dataset_info_response( - - [`~parquet_and_dataset_info.worker.DatasetInBlockListError`] + - [`~workers.parquet_and_dataset_info.DatasetInBlockListError`] diff --git a/workers/datasets_based/src/datasets_based/workers/sizes.py b/workers/datasets_based/src/datasets_based/workers/sizes.py index cd848e6f..79ade862 100644 --- a/workers/datasets_based/src/datasets_based/workers/sizes.py +++ b/workers/datasets_based/src/datasets_based/workers/sizes.py @@ -98 +98 @@ def compute_sizes_response(dataset: str) -> SizesResponse: - - [`~sizes.worker.PreviousStepStatusError`] + - [`~workers.sizes.PreviousStepStatusError`] @@ -100 +100 @@ def compute_sizes_response(dataset: str) -> SizesResponse: - - [`~sizes.worker.PreviousStepFormatError`] + - [`~workers.sizes.PreviousStepFormatError`] diff --git a/workers/datasets_based/src/datasets_based/workers/split_names.py b/workers/datasets_based/src/datasets_based/workers/split_names.py new file mode 100644 index 00000000..4a3cfdfd --- /dev/null +++ b/workers/datasets_based/src/datasets_based/workers/split_names.py @@ -0,0 +1,132 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import logging +from http import HTTPStatus +from typing import Any, List, Literal, Mapping, Optional, TypedDict, Union + +from datasets import get_dataset_split_names +from datasets.data_files import EmptyDatasetError as _EmptyDatasetError +from libcommon.exceptions import CustomError +from libcommon.simple_cache import SplitFullName + +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker + +SplitNamesWorkerErrorCode = Literal[ + "EmptyDatasetError", + "SplitNamesError", +] + + +class SplitNamesWorkerError(CustomError): + """Base class for worker exceptions.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: SplitNamesWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__( + message=message, status_code=status_code, code=str(code), cause=cause, disclose_cause=disclose_cause + ) + + +class SplitNamesError(SplitNamesWorkerError): + """Raised when the split names could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "SplitNamesError", cause, True) + + +class EmptyDatasetError(SplitNamesWorkerError): + """Raised when the dataset has no data.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) + + +class SplitNameItem(TypedDict): + dataset: str + config: str + split: str + + +class SplitNamesResponseContent(TypedDict): + split_names: List[SplitNameItem] + + +def compute_split_names_response( + dataset: str, + config: str, + hf_token: Optional[str] = None, +) -> SplitNamesResponseContent: + """ + Get the response of /split-names for one specific dataset and config on huggingface.co. + Dataset can be private or gated if you pass an acceptable token. + + It is assumed that the dataset exists and can be accessed using the token, and that the config exists in + the dataset. + + This function relies on the streaming mode if the splits are not directly defined in the dataset config. See + https://github.dev/huggingface/datasets/blob/e183a269067575db8765ee979bd8523d14a1adae/src/datasets/inspect.py#L389-L390 + + The /split-names response generated by this function does not include stats about the split, like the size or + number of samples. See /dataset-info or /sizes for that. + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + config (`str`): + A configuration name. + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `SplitNamesResponseContent`: An object with the list of split names for the dataset and config. + <Tip> + Raises the following errors: + - [`~workers.split_names.EmptyDatasetError`] + The dataset is empty. + - [`~workers.split_names.SplitNamesError`] + If the list of splits could not be obtained using the datasets library. 
+ </Tip> + """ + logging.info(f"get split names for dataset={dataset}, config={config}") + use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False + # get the list of splits in streaming mode + try: + split_name_items: List[SplitNameItem] = [ + {"dataset": dataset, "config": config, "split": str(split)} + for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token) + ] + except _EmptyDatasetError as err: + raise EmptyDatasetError("The dataset is empty.", cause=err) from err + except Exception as err: + raise SplitNamesError("Cannot get the split names for the dataset and config.", cause=err) from err + return {"split_names": split_name_items} + + +class SplitNamesWorker(DatasetsBasedWorker): + @staticmethod + def get_job_type() -> str: + return "/split-names" + + @staticmethod + def get_version() -> str: + return "1.0.0" + + def compute(self) -> Mapping[str, Any]: + if self.config is None: + raise ValueError("config is required") + return compute_split_names_response( + dataset=self.dataset, config=self.config, hf_token=self.common_config.hf_token + ) + + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + """Get the set of new splits, from the content created by the compute.""" + return { + SplitFullName(dataset=s["dataset"], config=s["config"], split=s["split"]) for s in content["split_names"] + } diff --git a/workers/datasets_based/src/datasets_based/workers/splits.py b/workers/datasets_based/src/datasets_based/workers/splits.py index 7c867788..118d1562 100644 --- a/workers/datasets_based/src/datasets_based/workers/splits.py +++ b/workers/datasets_based/src/datasets_based/workers/splits.py @@ -21 +21 @@ SplitsWorkerErrorCode = Literal[ -class SplitWorkerError(CustomError): +class SplitsWorkerError(CustomError): @@ -37 +37 @@ class SplitWorkerError(CustomError): -class SplitsNamesError(SplitWorkerError): +class SplitsNamesError(SplitsWorkerError): @@ -44 +44 @@ class SplitsNamesError(SplitWorkerError): -class EmptyDatasetError(SplitWorkerError): +class EmptyDatasetError(SplitsWorkerError): @@ -91 +91 @@ def compute_splits_response( - It is assumed that the dataset exist and can be accessed using the token. + It is assumed that the dataset exists and can be accessed using the token. @@ -96 +96,2 @@ def compute_splits_response( - The /splits response generated by this function does not include the optional "stats" field. See ./parquet.py + The /splits response generated by this function does not include stats about the split, like the size or number + of samples. See /dataset-info or /sizes for that. 
@@ -102,2 +102,0 @@ def compute_splits_response( - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") @@ -110 +109 @@ def compute_splits_response( - - [`~splits.worker.EmptyDatasetError`] + - [`~workers.splits.EmptyDatasetError`] @@ -112 +111 @@ def compute_splits_response( - - [`~splits.worker.SplitsNamesError`] + - [`~workers.splits.SplitsNamesError`] @@ -125 +123,0 @@ def compute_splits_response( - # As a rule, null values should have their fields removed -> "stats" field is not included diff --git a/workers/datasets_based/tests/fixtures/hub.py b/workers/datasets_based/tests/fixtures/hub.py index 79037ebb..244ab0fd 100644 --- a/workers/datasets_based/tests/fixtures/hub.py +++ b/workers/datasets_based/tests/fixtures/hub.py @@ -205,0 +206,2 @@ class HubDatasetTest(TypedDict): + config_names_response: Any + split_names_response: Any @@ -213,0 +216,25 @@ HubDatasets = Mapping[str, HubDatasetTest] +def create_config_names_response(dataset: str): + dataset, config, _ = get_default_config_split(dataset) + return { + "config_names": [ + { + "dataset": dataset, + "config": config, + } + ] + } + + +def create_split_names_response(dataset: str): + dataset, config, split = get_default_config_split(dataset) + return { + "split_names": [ + { + "dataset": dataset, + "config": config, + "split": split, + } + ] + } + + @@ -441,0 +469,2 @@ def hub_datasets( + "config_names_response": None, + "split_names_response": None, @@ -447,0 +477,2 @@ def hub_datasets( + "config_names_response": None, + "split_names_response": None, @@ -453,0 +485,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_public_csv), + "split_names_response": create_split_names_response(hub_public_csv), @@ -461,0 +495,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_private_csv), + "split_names_response": create_split_names_response(hub_private_csv), @@ -469,0 +505,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_gated_csv), + "split_names_response": create_split_names_response(hub_gated_csv), @@ -477,0 +515,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_public_jsonl), + "split_names_response": create_split_names_response(hub_public_jsonl), @@ -483,0 +523,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_gated_extra_fields_csv), + "split_names_response": create_split_names_response(hub_gated_extra_fields_csv), @@ -491,0 +533,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_public_audio), + "split_names_response": create_split_names_response(hub_public_audio), @@ -501,0 +545,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_public_image), + "split_names_response": create_split_names_response(hub_public_image), @@ -509,0 +555,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_public_images_list), + "split_names_response": create_split_names_response(hub_public_images_list), @@ -517,0 +565,2 @@ def hub_datasets( + "config_names_response": create_config_names_response(hub_public_big), + "split_names_response": create_split_names_response(hub_public_big), diff --git a/workers/datasets_based/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py index f765d39c..f9992eba 100644 --- a/workers/datasets_based/tests/test_worker.py +++ b/workers/datasets_based/tests/test_worker.py @@ -155,0 +156 @@ def test_create_children_jobs() -> None: + 
"/child-config": {"input_type": "config", "requires": "/dummy"}, @@ -181,0 +183,5 @@ def test_create_children_jobs() -> None: + child_config_jobs = Queue(type="/child-config").get_dump_with_status(status=Status.WAITING) + assert len(child_config_jobs) == 1 + assert child_config_jobs[0]["dataset"] == "dataset" + assert child_config_jobs[0]["config"] == "config" + assert child_config_jobs[0]["split"] is None diff --git a/workers/datasets_based/tests/test_worker_factory.py b/workers/datasets_based/tests/test_worker_factory.py index ea035989..468841df 100644 --- a/workers/datasets_based/tests/test_worker_factory.py +++ b/workers/datasets_based/tests/test_worker_factory.py @@ -15,0 +16 @@ from datasets_based.worker_factory import DatasetBasedWorkerFactory + ("/config-names", "ConfigNamesWorker"), diff --git a/workers/datasets_based/tests/workers/test_config_names.py b/workers/datasets_based/tests/workers/test_config_names.py new file mode 100644 index 00000000..e90f4efd --- /dev/null +++ b/workers/datasets_based/tests/workers/test_config_names.py @@ -0,0 +1,112 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from dataclasses import replace +from http import HTTPStatus + +import pytest +from libcommon.exceptions import CustomError +from libcommon.simple_cache import DoesNotExist, get_response + +from datasets_based.config import AppConfig +from datasets_based.workers.config_names import ConfigNamesWorker + +from ..fixtures.hub import HubDatasets + + +def get_worker( + dataset: str, + app_config: AppConfig, + force: bool = False, +) -> ConfigNamesWorker: + return ConfigNamesWorker( + job_info={ + "type": ConfigNamesWorker.get_job_type(), + "dataset": dataset, + "config": None, + "split": None, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + ) + + +def test_should_skip_job(app_config: AppConfig, hub_public_csv: str) -> None: + dataset = hub_public_csv + worker = get_worker(dataset, app_config) + assert worker.should_skip_job() is False + # we add an entry to the cache + worker.process() + assert worker.should_skip_job() is True + worker = get_worker(dataset, app_config, force=True) + assert worker.should_skip_job() is False + + +def test_process(app_config: AppConfig, hub_public_csv: str) -> None: + dataset = hub_public_csv + worker = get_worker(dataset, app_config) + assert worker.process() is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_public_csv) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + assert cached_response["worker_version"] == worker.get_version() + assert cached_response["dataset_git_revision"] is not None + assert cached_response["error_code"] is None + content = cached_response["content"] + assert len(content["config_names"]) == 1 + + +def test_doesnotexist(app_config: AppConfig) -> None: + dataset = "doesnotexist" + worker = get_worker(dataset, app_config) + assert worker.process() is False + with pytest.raises(DoesNotExist): + get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + + [email protected]( + "name,use_token,error_code,cause", + [ + ("public", False, None, None), + ("audio", False, None, None), + ("gated", True, None, None), + ("private", True, None, None), + ("empty", False, "EmptyDatasetError", "EmptyDatasetError"), + # should we really test the following cases? 
+ # The assumption is that the dataset exists and is accessible with the token + ("does_not_exist", False, "ConfigNamesError", "FileNotFoundError"), + ("gated", False, "ConfigNamesError", "FileNotFoundError"), + ("private", False, "ConfigNamesError", "FileNotFoundError"), + ], +) +def test_compute_splits_response_simple_csv( + hub_datasets: HubDatasets, name: str, use_token: bool, error_code: str, cause: str, app_config: AppConfig +) -> None: + dataset = hub_datasets[name]["name"] + expected_configs_response = hub_datasets[name]["config_names_response"] + worker = get_worker( + dataset, + app_config if use_token else replace(app_config, common=replace(app_config.common, hf_token=None)), + ) + if error_code is None: + result = worker.compute() + assert result == expected_configs_response + return + + with pytest.raises(CustomError) as exc_info: + worker.compute() + assert exc_info.value.code == error_code + if cause is None: + assert exc_info.value.disclose_cause is False + assert exc_info.value.cause_exception is None + else: + assert exc_info.value.disclose_cause is True + assert exc_info.value.cause_exception == cause + response = exc_info.value.as_response() + assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} + response_dict = dict(response) + # ^ to remove mypy warnings + assert response_dict["cause_exception"] == cause + assert isinstance(response_dict["cause_traceback"], list) + assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" diff --git a/workers/datasets_based/tests/workers/test_first_rows.py b/workers/datasets_based/tests/workers/test_first_rows.py index 60fc1f39..c734bd00 100644 --- a/workers/datasets_based/tests/workers/test_first_rows.py +++ b/workers/datasets_based/tests/workers/test_first_rows.py @@ -40 +40 @@ def get_worker( -def should_skip_job(app_config: AppConfig, first_rows_config: FirstRowsConfig, hub_public_csv: str) -> None: +def test_should_skip_job(app_config: AppConfig, first_rows_config: FirstRowsConfig, hub_public_csv: str) -> None: diff --git a/workers/datasets_based/tests/workers/test_split_names.py b/workers/datasets_based/tests/workers/test_split_names.py new file mode 100644 index 00000000..1d2c5f7b --- /dev/null +++ b/workers/datasets_based/tests/workers/test_split_names.py @@ -0,0 +1,105 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from dataclasses import replace +from http import HTTPStatus + +import pytest +from libcommon.exceptions import CustomError +from libcommon.simple_cache import DoesNotExist, get_response + +from datasets_based.config import AppConfig +from datasets_based.workers.split_names import SplitNamesWorker + +from ..fixtures.hub import HubDatasets, get_default_config_split + + +def get_worker( + dataset: str, + config: str, + app_config: AppConfig, + force: bool = False, +) -> SplitNamesWorker: + return SplitNamesWorker( + job_info={ + "type": SplitNamesWorker.get_job_type(), + "dataset": dataset, + "config": config, + "split": None, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + ) + + +def test_process(app_config: AppConfig, hub_public_csv: str) -> None: + dataset, config, _ = get_default_config_split(hub_public_csv) + worker = get_worker(dataset, config, app_config) + assert worker.process() is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_public_csv, config=config) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + assert cached_response["worker_version"] == worker.get_version() + assert cached_response["dataset_git_revision"] is not None + assert cached_response["error_code"] is None + content = cached_response["content"] + assert len(content["split_names"]) == 1 + + +def test_doesnotexist(app_config: AppConfig) -> None: + dataset = "doesnotexist" + config = "some_config" + worker = get_worker(dataset, config, app_config) + assert worker.process() is False + with pytest.raises(DoesNotExist): + get_response(kind=worker.processing_step.cache_kind, dataset=dataset, config=config) + + [email protected]( + "name,use_token,error_code,cause", + [ + ("public", False, None, None), + ("audio", False, None, None), + ("gated", True, None, None), + ("private", True, None, None), + ("empty", False, "EmptyDatasetError", "EmptyDatasetError"), + # should we really test the following cases? 
+ # The assumption is that the dataset exists and is accessible with the token + ("does_not_exist", False, "SplitNamesError", "FileNotFoundError"), + ("gated", False, "SplitNamesError", "FileNotFoundError"), + ("private", False, "SplitNamesError", "FileNotFoundError"), + ], +) +def test_compute_split_names_response( + hub_datasets: HubDatasets, name: str, use_token: bool, error_code: str, cause: str, app_config: AppConfig +) -> None: + dataset, config, _ = get_default_config_split(hub_datasets[name]["name"]) + expected_split_names_response = hub_datasets[name]["split_names_response"] + worker = get_worker( + dataset, + config, + app_config if use_token else replace(app_config, common=replace(app_config.common, hf_token=None)), + ) + if error_code is None: + result = worker.compute() + assert result == expected_split_names_response + return + + with pytest.raises(CustomError) as exc_info: + worker.compute() + assert exc_info.value.code == error_code + if cause is None: + assert exc_info.value.disclose_cause is False + assert exc_info.value.cause_exception is None + else: + assert exc_info.value.disclose_cause is True + assert exc_info.value.cause_exception == cause + response = exc_info.value.as_response() + assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} + response_dict = dict(response) + # ^ to remove mypy warnings + assert response_dict["cause_exception"] == cause + assert isinstance(response_dict["cause_traceback"], list) + assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n"
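The commit above splits dataset enumeration into two levels: /config-names lists the configurations of a dataset, and /split-names lists the splits of one configuration. The sketch below shows that two-level enumeration using the same `datasets` calls the new workers rely on (`get_dataset_config_names` and `get_dataset_split_names`); the `enumerate_dataset` helper and the example dataset name are illustrative assumptions, not part of the commit.

# A minimal two-level enumeration sketch, mirroring the /config-names and
# /split-names workers added above. `enumerate_dataset` is a hypothetical
# helper; the two `datasets` calls are the ones the workers actually use.
from typing import List, Optional, TypedDict, Union

from datasets import get_dataset_config_names, get_dataset_split_names


class SplitItem(TypedDict):
    dataset: str
    config: str
    split: str


def enumerate_dataset(dataset: str, hf_token: Optional[str] = None) -> List[SplitItem]:
    use_auth_token: Union[bool, str] = hf_token if hf_token is not None else False
    items: List[SplitItem] = []
    # level 1 (/config-names): one entry per configuration of the dataset
    for config in sorted(get_dataset_config_names(path=dataset, use_auth_token=use_auth_token)):
        # level 2 (/split-names): one entry per split of that configuration
        for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token):
            items.append({"dataset": dataset, "config": str(config), "split": str(split)})
    return items


if __name__ == "__main__":
    # "glue" is only an example of a public dataset with several configurations
    for item in enumerate_dataset("glue"):
        print(item)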
8d890ea793959bb0847239e2de798b08aada4f8c
Sylvain Lesage
2023-01-26T08:59:34
ci: 🎡 launch CI when libcommon has been modified (#703)
diff --git a/.github/workflows/j-migration-build-docker.yml b/.github/workflows/j-migration-build-docker.yml index bf660447..a3ec4ff3 100644 --- a/.github/workflows/j-migration-build-docker.yml +++ b/.github/workflows/j-migration-build-docker.yml @@ -12,0 +13 @@ on: + - 'libs/libcommon/**' diff --git a/.github/workflows/j-migration.yml b/.github/workflows/j-migration.yml index df8f5f6f..b4a1c620 100644 --- a/.github/workflows/j-migration.yml +++ b/.github/workflows/j-migration.yml @@ -9,0 +10 @@ on: + - 'libs/libcommon/**' diff --git a/.github/workflows/s-admin-build-docker.yml b/.github/workflows/s-admin-build-docker.yml index 9bbff4ad..ce5298cd 100644 --- a/.github/workflows/s-admin-build-docker.yml +++ b/.github/workflows/s-admin-build-docker.yml @@ -8,0 +9 @@ on: + - 'libs/libcommon/**' diff --git a/.github/workflows/s-admin.yml b/.github/workflows/s-admin.yml index b8d73835..a1c05c5e 100644 --- a/.github/workflows/s-admin.yml +++ b/.github/workflows/s-admin.yml @@ -8,0 +9 @@ on: + - 'libs/libcommon/**' diff --git a/.github/workflows/s-api-build-docker.yml b/.github/workflows/s-api-build-docker.yml index 087faea1..913b5648 100644 --- a/.github/workflows/s-api-build-docker.yml +++ b/.github/workflows/s-api-build-docker.yml @@ -8,0 +9 @@ on: + - 'libs/libcommon/**' diff --git a/.github/workflows/s-api.yml b/.github/workflows/s-api.yml index 30eccd73..0323f6c6 100644 --- a/.github/workflows/s-api.yml +++ b/.github/workflows/s-api.yml @@ -8,0 +9 @@ on: + - 'libs/libcommon/**' diff --git a/.github/workflows/w-datasets_based-build-docker.yml b/.github/workflows/w-datasets_based-build-docker.yml index 32aa1007..b7a55326 100644 --- a/.github/workflows/w-datasets_based-build-docker.yml +++ b/.github/workflows/w-datasets_based-build-docker.yml @@ -8,0 +9 @@ on: + - 'libs/libcommon/**' diff --git a/.github/workflows/w-datasets_based.yml b/.github/workflows/w-datasets_based.yml index beb0c3af..b2f6aed2 100644 --- a/.github/workflows/w-datasets_based.yml +++ b/.github/workflows/w-datasets_based.yml @@ -8,0 +9 @@ on: + - 'libs/libcommon/**'
55063e17c1491955a594ba3422b3183adb12e66c
Sylvain Lesage
2023-01-25T14:22:01
Update hfh (#700)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 76614377..cecf827b 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-05a1740" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-5364f81" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-05a1740", - "api": "huggingface/datasets-server-services-api:sha-05a1740" + "admin": "huggingface/datasets-server-services-admin:sha-5364f81", + "api": "huggingface/datasets-server-services-api:sha-5364f81" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-05a1740" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-5364f81" diff --git a/e2e/poetry.lock b/e2e/poetry.lock index 711adadb..1455455b 100644 --- a/e2e/poetry.lock +++ b/e2e/poetry.lock @@ -212 +212 @@ name = "huggingface-hub" -version = "0.11.1" +version = "0.12.0" @@ -223 +223 @@ requests = "*" -tqdm = "*" +tqdm = ">=4.42.1" @@ -227 +227 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -229 +229 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -233 +233 @@ tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] @@ -744 +744 @@ python-versions = "3.9.15" -content-hash = "1221c5dd97d09acd12a4a388d83084e57194bced3c29c0a3e24215597bf75f4e" +content-hash = "244cb47bc1d16906470419a4f8e2ff371df9c4c22e10f452d6d62e05aae9a98d" @@ -908,2 +908,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, - {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, + {file = "huggingface_hub-0.12.0-py3-none-any.whl", hash = "sha256:93809eabbfb2058a808bddf8b2a70f645de3f9df73ce87ddf5163d4c74b71c0c"}, + {file = "huggingface_hub-0.12.0.tar.gz", hash = "sha256:da82c9ec8f9d8f976ffd3fd8249d20bb35c2dd3145a9f7ca1106f0ebefd9afa0"}, diff --git a/e2e/pyproject.toml 
b/e2e/pyproject.toml index cbd838f3..00393c3f 100644 --- a/e2e/pyproject.toml +++ b/e2e/pyproject.toml @@ -16 +16 @@ flake8 = "^3.9.2" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index 920e5bf4..9946fee9 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -250 +250 @@ name = "huggingface-hub" -version = "0.11.1" +version = "0.12.0" @@ -261 +261 @@ requests = "*" -tqdm = "*" +tqdm = ">=4.42.1" @@ -265 +265 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -267 +267 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -271 +271 @@ tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] @@ -317 +317 @@ environs = "^9.5.0" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" @@ -1106,2 +1106,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, - {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, + {file = "huggingface_hub-0.12.0-py3-none-any.whl", hash = "sha256:93809eabbfb2058a808bddf8b2a70f645de3f9df73ce87ddf5163d4c74b71c0c"}, + {file = "huggingface_hub-0.12.0.tar.gz", hash = "sha256:da82c9ec8f9d8f976ffd3fd8249d20bb35c2dd3145a9f7ca1106f0ebefd9afa0"}, diff --git a/libs/libcommon/poetry.lock b/libs/libcommon/poetry.lock index d95eca95..a7b22256 100644 --- a/libs/libcommon/poetry.lock +++ b/libs/libcommon/poetry.lock @@ -261 +261 @@ name = "huggingface-hub" -version = "0.11.1" +version = "0.12.0" @@ -272 +272 @@ requests = "*" -tqdm = "*" +tqdm = ">=4.42.1" @@ -276 +276 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +all = ["InquirerPy (==0.3.4)", "Jinja2", 
"Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -278 +278 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -282 +282 @@ tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] @@ -847 +847 @@ python-versions = "3.9.15" -content-hash = "f680ec6ec5d2a2e09adaaaf2f0f857e0bd89f22cd3d4b1de8c97f39402c872a3" +content-hash = "bcb7717006cb6327c70332dcf65e402f151fa0fdfd483c4a02a40380a87b544b" @@ -1076,2 +1076,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, - {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, + {file = "huggingface_hub-0.12.0-py3-none-any.whl", hash = "sha256:93809eabbfb2058a808bddf8b2a70f645de3f9df73ce87ddf5163d4c74b71c0c"}, + {file = "huggingface_hub-0.12.0.tar.gz", hash = "sha256:da82c9ec8f9d8f976ffd3fd8249d20bb35c2dd3145a9f7ca1106f0ebefd9afa0"}, diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 2ac13c07..df1074c0 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -11 +11 @@ environs = "^9.5.0" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index b5b1f19c..8c023287 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -297 +297 @@ name = "huggingface-hub" -version = "0.11.1" +version = "0.12.0" @@ -308 +308 @@ requests = "*" -tqdm = "*" +tqdm = ">=4.42.1" @@ -312 +312 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -314 +314 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", 
"types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -318 +318 @@ tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] @@ -364 +364 @@ environs = "^9.5.0" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" @@ -996 +996 @@ python-versions = "3.9.15" -content-hash = "95fb7ecb3a6c7b3afb6cebd2e90a1f51a56ff86f934b17b748b396347a8d26fc" +content-hash = "6758d4c8d9fc7cc871367f0f715dc44aade5b2e4c66ba38894ec08b2c698612b" @@ -1237,2 +1237,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, - {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, + {file = "huggingface_hub-0.12.0-py3-none-any.whl", hash = "sha256:93809eabbfb2058a808bddf8b2a70f645de3f9df73ce87ddf5163d4c74b71c0c"}, + {file = "huggingface_hub-0.12.0.tar.gz", hash = "sha256:da82c9ec8f9d8f976ffd3fd8249d20bb35c2dd3145a9f7ca1106f0ebefd9afa0"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 8ea278cb..ad77eeda 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -21 +21 @@ flake8 = "^3.9.2" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 415b552e..24693e8d 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -297 +297 @@ name = "huggingface-hub" -version = "0.11.1" +version = "0.12.0" @@ -308 +308 @@ requests = "*" -tqdm = "*" +tqdm = ">=4.42.1" @@ -312 +312 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -314 +314 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -318 +318 @@ tensorflow = ["graphviz", "pydot", 
"tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] @@ -380 +380 @@ environs = "^9.5.0" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" @@ -1030 +1030 @@ python-versions = "3.9.15" -content-hash = "e813683c7834053c461b39218802e309a1b4d6c69b179772c179bbfa9be292e4" +content-hash = "073a785848bf1132d6407c6b57b962ef52e7ec8752c9bbf165d7d772ca9ab6bb" @@ -1271,2 +1271,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, - {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, + {file = "huggingface_hub-0.12.0-py3-none-any.whl", hash = "sha256:93809eabbfb2058a808bddf8b2a70f645de3f9df73ce87ddf5163d4c74b71c0c"}, + {file = "huggingface_hub-0.12.0.tar.gz", hash = "sha256:da82c9ec8f9d8f976ffd3fd8249d20bb35c2dd3145a9f7ca1106f0ebefd9afa0"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 26dccaea..b65adfdd 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index 46ab4ca1..84d8ca0b 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -735 +735 @@ name = "huggingface-hub" -version = "0.11.1" +version = "0.12.0" @@ -746 +746 @@ requests = "*" -tqdm = "*" +tqdm = ">=4.42.1" @@ -750 +750 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -752 +752 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -756 +756 @@ tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] @@ -886 +886 @@ environs = "^9.5.0" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" @@ -2483 +2483 @@ python-versions = 
"3.9.15" -content-hash = "ef513d79226c862daee0ed3bd16d812090441ea0cd41d93e82f185dbe765b0a6" +content-hash = "d813a3067825668630455a2c36c1f9b52d3704431cc41bc4860400f729e570de" @@ -3271,2 +3271,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, - {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, + {file = "huggingface_hub-0.12.0-py3-none-any.whl", hash = "sha256:93809eabbfb2058a808bddf8b2a70f645de3f9df73ce87ddf5163d4c74b71c0c"}, + {file = "huggingface_hub-0.12.0.tar.gz", hash = "sha256:da82c9ec8f9d8f976ffd3fd8249d20bb35c2dd3145a9f7ca1106f0ebefd9afa0"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 9fee7083..f638ed71 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -17 +17 @@ gdown = "^4.2.0" -huggingface-hub = "^0.11.0" +huggingface-hub = "^0.12.0" diff --git a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py index 7a0a554f..a2f9a8b5 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py @@ -591 +591,5 @@ def compute_parquet_and_dataset_info_response( - target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) + refs = hf_api.list_repo_refs(repo_id=dataset, repo_type=DATASET_TYPE) + if all(ref.ref != target_revision for ref in refs.converts): + committer_hf_api.create_branch( + repo_id=dataset, branch=target_revision, repo_type=DATASET_TYPE, revision=source_revision + ) @@ -594,4 +597,0 @@ def compute_parquet_and_dataset_info_response( - except RevisionNotFoundError: - # create the parquet_ref (refs/convert/parquet) - committer_hf_api.create_branch(repo_id=dataset, branch=target_revision, repo_type=DATASET_TYPE) - target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) @@ -600,0 +601 @@ def compute_parquet_and_dataset_info_response( + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False)
02ca8982643652c99ca5b8a4d17c73313c84f9f0
Sylvain Lesage
2023-01-25T10:33:36
refactor: 💡 set libcommon as an "editable" dependency (#699)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 509e1adb..76614377 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-0abd269" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-05a1740" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-0abd269", - "api": "huggingface/datasets-server-services-api:sha-0abd269" + "admin": "huggingface/datasets-server-services-admin:sha-05a1740", + "api": "huggingface/datasets-server-services-api:sha-05a1740" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-0abd269" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-05a1740" diff --git a/jobs/mongodb_migration/Dockerfile b/jobs/mongodb_migration/Dockerfile index ea7c97a6..2f3f7475 100644 --- a/jobs/mongodb_migration/Dockerfile +++ b/jobs/mongodb_migration/Dockerfile @@ -26 +26 @@ COPY jobs/mongodb_migration/pyproject.toml ./jobs/mongodb_migration/pyproject.to -COPY libs/libcommon/dist ./libs/libcommon/dist +COPY libs/libcommon ./libs/libcommon diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index dc2e516b..920e5bf4 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -311 +311,2 @@ optional = false -python-versions = "==3.9.15" +python-versions = "3.9.15" +develop = true @@ -314,3 +315,3 @@ python-versions = "==3.9.15" -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -huggingface-hub = ">=0.11.0,<0.12.0" +appdirs = "^1.4.4" +environs = "^9.5.0" +huggingface-hub = "^0.11.0" @@ -318,4 +319,4 @@ mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -orjson = ">=3.6.4,<4.0.0" -psutil = ">=5.9.2,<6.0.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} +mongoengine = "^0.24.1" +orjson = "^3.6.4" +psutil = "^5.9.2" +pymongo = {version = "^3.13.0", extras = ["srv"]} @@ -324,2 +325,2 @@ pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" +type = "directory" +url = "../../libs/libcommon" @@ -880 +881 @@ python-versions = "3.9.15" -content-hash = "9cc914c0fdedb940ea8057a97966e733540449cf038518036ef5708de22d2a2e" +content-hash = "fb2aa3c18a66fff5bdf26107bb6265120efb94a66e99ae8507aea1d6bc9def0c" @@ -1120,3 +1121 @@ isort = [ -libcommon = [ - {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, -] +libcommon = [] diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index e4a21a0e..ddc39bda 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false } +libcommon = {path = "../../libs/libcommon", develop = true} diff --git a/libs/libcommon/dist/libcommon-0.3.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.0-py3-none-any.whl deleted file mode 100644 index d379c233..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.0.tar.gz b/libs/libcommon/dist/libcommon-0.3.0.tar.gz deleted file mode 100644 index 23972716..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.0.tar.gz and /dev/null differ diff --git 
a/libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl deleted file mode 100644 index 22a2e7b7..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.1.tar.gz b/libs/libcommon/dist/libcommon-0.3.1.tar.gz deleted file mode 100644 index 69940fdf..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.1.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl deleted file mode 100644 index 5517358f..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.2.tar.gz b/libs/libcommon/dist/libcommon-0.3.2.tar.gz deleted file mode 100644 index 5e78bfef..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.2.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl deleted file mode 100644 index 8dd8d15d..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.3.tar.gz b/libs/libcommon/dist/libcommon-0.3.3.tar.gz deleted file mode 100644 index b363e0c7..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.3.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.4-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.4-py3-none-any.whl deleted file mode 100644 index ad4f6645..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.4-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.3.4.tar.gz b/libs/libcommon/dist/libcommon-0.3.4.tar.gz deleted file mode 100644 index 12a032b8..00000000 Binary files a/libs/libcommon/dist/libcommon-0.3.4.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.4.0-py3-none-any.whl deleted file mode 100644 index 3d3caa5d..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.0.tar.gz b/libs/libcommon/dist/libcommon-0.4.0.tar.gz deleted file mode 100644 index 5b801885..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.0.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.4.1-py3-none-any.whl deleted file mode 100644 index 84ef4ebe..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.1.tar.gz b/libs/libcommon/dist/libcommon-0.4.1.tar.gz deleted file mode 100644 index 2f4967b4..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.1.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.2-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.4.2-py3-none-any.whl deleted file mode 100644 index decc0817..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.2.tar.gz b/libs/libcommon/dist/libcommon-0.4.2.tar.gz deleted file mode 100644 index 7d41e1d1..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.2.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.4.3-py3-none-any.whl deleted file 
mode 100644 index f64d9d60..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.4.3.tar.gz b/libs/libcommon/dist/libcommon-0.4.3.tar.gz deleted file mode 100644 index 870b19e3..00000000 Binary files a/libs/libcommon/dist/libcommon-0.4.3.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl deleted file mode 100644 index ec1e021a..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.0.tar.gz b/libs/libcommon/dist/libcommon-0.5.0.tar.gz deleted file mode 100644 index ba909997..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.0.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl deleted file mode 100644 index c23d6a99..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.1.tar.gz b/libs/libcommon/dist/libcommon-0.5.1.tar.gz deleted file mode 100644 index ba741fee..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.1.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl deleted file mode 100644 index 2d75199a..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.10.tar.gz b/libs/libcommon/dist/libcommon-0.5.10.tar.gz deleted file mode 100644 index 90599cb7..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.10.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl deleted file mode 100644 index 81bba9af..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.11.tar.gz b/libs/libcommon/dist/libcommon-0.5.11.tar.gz deleted file mode 100644 index 32af840e..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.11.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl deleted file mode 100644 index 7e28167d..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.2.tar.gz b/libs/libcommon/dist/libcommon-0.5.2.tar.gz deleted file mode 100644 index b5da8cc9..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.2.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl deleted file mode 100644 index efa4b510..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.3.tar.gz b/libs/libcommon/dist/libcommon-0.5.3.tar.gz deleted file mode 100644 index d28c2ff6..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.3.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.4-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.4-py3-none-any.whl deleted file mode 100644 index 5b508cbc..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.4-py3-none-any.whl and 
/dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.4.tar.gz b/libs/libcommon/dist/libcommon-0.5.4.tar.gz deleted file mode 100644 index 828a1f83..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.4.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.5-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.5-py3-none-any.whl deleted file mode 100644 index cec63769..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.5-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.5.tar.gz b/libs/libcommon/dist/libcommon-0.5.5.tar.gz deleted file mode 100644 index 236fde04..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.5.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.6-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.6-py3-none-any.whl deleted file mode 100644 index de99fdc8..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.6-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.6.tar.gz b/libs/libcommon/dist/libcommon-0.5.6.tar.gz deleted file mode 100644 index 1f9b059f..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.6.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.7-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.7-py3-none-any.whl deleted file mode 100644 index a77d211c..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.7-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.7.tar.gz b/libs/libcommon/dist/libcommon-0.5.7.tar.gz deleted file mode 100644 index 843c381e..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.7.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.8-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.8-py3-none-any.whl deleted file mode 100644 index ca1c0248..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.8-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.8.tar.gz b/libs/libcommon/dist/libcommon-0.5.8.tar.gz deleted file mode 100644 index 3c12d1aa..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.8.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.9-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.9-py3-none-any.whl deleted file mode 100644 index 74f3db82..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.9-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.5.9.tar.gz b/libs/libcommon/dist/libcommon-0.5.9.tar.gz deleted file mode 100644 index d53f39e4..00000000 Binary files a/libs/libcommon/dist/libcommon-0.5.9.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl deleted file mode 100644 index f4edf7ee..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.0.tar.gz b/libs/libcommon/dist/libcommon-0.6.0.tar.gz deleted file mode 100644 index 5ce5b739..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.0.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl deleted file mode 100644 index 7f2e2966..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.1.tar.gz b/libs/libcommon/dist/libcommon-0.6.1.tar.gz 
deleted file mode 100644 index c6ce3880..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.1.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.2-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.2-py3-none-any.whl deleted file mode 100644 index 8f4748dd..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.2.tar.gz b/libs/libcommon/dist/libcommon-0.6.2.tar.gz deleted file mode 100644 index 75a67342..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.2.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.3-py3-none-any.whl deleted file mode 100644 index d655f2a3..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.3.tar.gz b/libs/libcommon/dist/libcommon-0.6.3.tar.gz deleted file mode 100644 index 05e8b2b7..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.3.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl deleted file mode 100644 index 281e35c4..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.4.tar.gz b/libs/libcommon/dist/libcommon-0.6.4.tar.gz deleted file mode 100644 index ed1c82ac..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.4.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.5-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.5-py3-none-any.whl deleted file mode 100644 index 876ea6f7..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.5-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.5.tar.gz b/libs/libcommon/dist/libcommon-0.6.5.tar.gz deleted file mode 100644 index 0b1a3ea8..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.5.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl deleted file mode 100644 index 3098e18b..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.6.tar.gz b/libs/libcommon/dist/libcommon-0.6.6.tar.gz deleted file mode 100644 index 9b5ff70a..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.6.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl deleted file mode 100644 index 64a5f2f5..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.7.tar.gz b/libs/libcommon/dist/libcommon-0.6.7.tar.gz deleted file mode 100644 index f9b60e78..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.7.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl deleted file mode 100644 index b0751216..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libcommon-0.6.8.tar.gz b/libs/libcommon/dist/libcommon-0.6.8.tar.gz deleted file mode 100644 index a63f5208..00000000 Binary files a/libs/libcommon/dist/libcommon-0.6.8.tar.gz and /dev/null 
differ diff --git a/libs/libcommon/dist/libutils-0.1.0-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.0-py3-none-any.whl deleted file mode 100644 index adac4444..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.0.tar.gz b/libs/libcommon/dist/libutils-0.1.0.tar.gz deleted file mode 100644 index f421b079..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.0.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.1-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.1-py3-none-any.whl deleted file mode 100644 index 80357c1c..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.1.tar.gz b/libs/libcommon/dist/libutils-0.1.1.tar.gz deleted file mode 100644 index 015488c2..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.1.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.10-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.10-py3-none-any.whl deleted file mode 100644 index 875f516b..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.10-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.10.tar.gz b/libs/libcommon/dist/libutils-0.1.10.tar.gz deleted file mode 100644 index 223578fd..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.10.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.11-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.11-py3-none-any.whl deleted file mode 100644 index b0c9d3c3..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.11-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.11.tar.gz b/libs/libcommon/dist/libutils-0.1.11.tar.gz deleted file mode 100644 index 2248ef9e..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.11.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.2-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.2-py3-none-any.whl deleted file mode 100644 index f4dfb865..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.2.tar.gz b/libs/libcommon/dist/libutils-0.1.2.tar.gz deleted file mode 100644 index 3d6577ac..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.2.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.3-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.3-py3-none-any.whl deleted file mode 100644 index 513bef32..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.3.tar.gz b/libs/libcommon/dist/libutils-0.1.3.tar.gz deleted file mode 100644 index 0aa95393..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.3.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.4-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.4-py3-none-any.whl deleted file mode 100644 index 8a3fbcd5..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.4-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.4.tar.gz b/libs/libcommon/dist/libutils-0.1.4.tar.gz deleted file mode 100644 index 6df7e684..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.4.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.5-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.5-py3-none-any.whl deleted file mode 100644 
index 1ef97238..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.5-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.5.tar.gz b/libs/libcommon/dist/libutils-0.1.5.tar.gz deleted file mode 100644 index 0cf0f36f..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.5.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.6-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.6-py3-none-any.whl deleted file mode 100644 index 234314eb..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.6-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.6.tar.gz b/libs/libcommon/dist/libutils-0.1.6.tar.gz deleted file mode 100644 index 8fa0adaf..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.6.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.7-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.7-py3-none-any.whl deleted file mode 100644 index cf005141..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.7-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.7.tar.gz b/libs/libcommon/dist/libutils-0.1.7.tar.gz deleted file mode 100644 index 49336171..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.7.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.8-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.8-py3-none-any.whl deleted file mode 100644 index e52fdb33..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.8-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.8.tar.gz b/libs/libcommon/dist/libutils-0.1.8.tar.gz deleted file mode 100644 index 5a7e2e66..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.8.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.9-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.9-py3-none-any.whl deleted file mode 100644 index d00447c0..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.9-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.1.9.tar.gz b/libs/libcommon/dist/libutils-0.1.9.tar.gz deleted file mode 100644 index b948a753..00000000 Binary files a/libs/libcommon/dist/libutils-0.1.9.tar.gz and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.2.0-py3-none-any.whl b/libs/libcommon/dist/libutils-0.2.0-py3-none-any.whl deleted file mode 100644 index ae555690..00000000 Binary files a/libs/libcommon/dist/libutils-0.2.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcommon/dist/libutils-0.2.0.tar.gz b/libs/libcommon/dist/libutils-0.2.0.tar.gz deleted file mode 100644 index f9ad20de..00000000 Binary files a/libs/libcommon/dist/libutils-0.2.0.tar.gz and /dev/null differ diff --git a/services/admin/Dockerfile b/services/admin/Dockerfile index 344b510b..e249eaca 100644 --- a/services/admin/Dockerfile +++ b/services/admin/Dockerfile @@ -26 +26 @@ COPY services/admin/pyproject.toml ./services/admin/pyproject.toml -COPY libs/libcommon/dist ./libs/libcommon/dist +COPY libs/libcommon ./libs/libcommon diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index b54b9aa4..b5b1f19c 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -358 +358,2 @@ optional = false -python-versions = "==3.9.15" +python-versions = "3.9.15" +develop = true @@ -361,3 +362,3 @@ python-versions = "==3.9.15" -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -huggingface-hub = ">=0.11.0,<0.12.0" +appdirs = "^1.4.4" +environs = "^9.5.0" 
+huggingface-hub = "^0.11.0" @@ -365,4 +366,4 @@ mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -orjson = ">=3.6.4,<4.0.0" -psutil = ">=5.9.2,<6.0.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} +mongoengine = "^0.24.1" +orjson = "^3.6.4" +psutil = "^5.9.2" +pymongo = {version = "^3.13.0", extras = ["srv"]} @@ -371,2 +372,2 @@ pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" +type = "directory" +url = "../../libs/libcommon" @@ -995 +996 @@ python-versions = "3.9.15" -content-hash = "ca56fd792b28501ba816d6bc44df28efacaf8ea43839c77d39c920c3c0e8913a" +content-hash = "95fb7ecb3a6c7b3afb6cebd2e90a1f51a56ff86f934b17b748b396347a8d26fc" @@ -1251,3 +1252 @@ isort = [ -libcommon = [ - {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, -] +libcommon = [] diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index b29afef4..8ea278cb 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false } +libcommon = {path = "../../libs/libcommon", develop = true} diff --git a/services/api/Dockerfile b/services/api/Dockerfile index 4201e26e..33654934 100644 --- a/services/api/Dockerfile +++ b/services/api/Dockerfile @@ -26 +26 @@ COPY services/api/pyproject.toml ./services/api/pyproject.toml -COPY libs/libcommon/dist ./libs/libcommon/dist +COPY libs/libcommon ./libs/libcommon diff --git a/services/api/poetry.lock b/services/api/poetry.lock index d48e8770..415b552e 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -374 +374,2 @@ optional = false -python-versions = "==3.9.15" +python-versions = "3.9.15" +develop = true @@ -377,3 +378,3 @@ python-versions = "==3.9.15" -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -huggingface-hub = ">=0.11.0,<0.12.0" +appdirs = "^1.4.4" +environs = "^9.5.0" +huggingface-hub = "^0.11.0" @@ -381,4 +382,4 @@ mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -orjson = ">=3.6.4,<4.0.0" -psutil = ">=5.9.2,<6.0.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} +mongoengine = "^0.24.1" +orjson = "^3.6.4" +psutil = "^5.9.2" +pymongo = {version = "^3.13.0", extras = ["srv"]} @@ -387,2 +388,2 @@ pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" +type = "directory" +url = "../../libs/libcommon" @@ -1029 +1030 @@ python-versions = "3.9.15" -content-hash = "9fc9f9e46fd5dc0a83603f51225495b7d9b86ff64f6b1481d379a498fa44d522" +content-hash = "e813683c7834053c461b39218802e309a1b4d6c69b179772c179bbfa9be292e4" @@ -1289,3 +1290 @@ jsonschema = [ -libcommon = [ - {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, -] +libcommon = [] diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 880a093e..26dccaea 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false } +libcommon = {path = "../../libs/libcommon", develop = true} diff --git a/tools/Python.mk b/tools/Python.mk index d03e0205..f950eac5 100644 --- a/tools/Python.mk +++ b/tools/Python.mk @@ -13,4 
+12,0 @@ lock: -.PHONY: build -build: - poetry build - diff --git a/workers/datasets_based/Dockerfile b/workers/datasets_based/Dockerfile index 39ee8c95..a8718326 100644 --- a/workers/datasets_based/Dockerfile +++ b/workers/datasets_based/Dockerfile @@ -30 +30 @@ COPY workers/datasets_based/pyproject.toml ./workers/datasets_based/pyproject.to -COPY libs/libcommon/dist ./libs/libcommon/dist +COPY libs/libcommon ./libs/libcommon diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index cb5ced04..46ab4ca1 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -880 +880,2 @@ optional = false -python-versions = "==3.9.15" +python-versions = "3.9.15" +develop = true @@ -883,3 +884,3 @@ python-versions = "==3.9.15" -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -huggingface-hub = ">=0.11.0,<0.12.0" +appdirs = "^1.4.4" +environs = "^9.5.0" +huggingface-hub = "^0.11.0" @@ -887,4 +888,4 @@ mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -orjson = ">=3.6.4,<4.0.0" -psutil = ">=5.9.2,<6.0.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} +mongoengine = "^0.24.1" +orjson = "^3.6.4" +psutil = "^5.9.2" +pymongo = {version = "^3.13.0", extras = ["srv"]} @@ -893,2 +894,2 @@ pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" +type = "directory" +url = "../../libs/libcommon" @@ -2482 +2483 @@ python-versions = "3.9.15" -content-hash = "9f6679bc511c61a3619696cafabd8c404ca8cecee25d1e23ea7903daea4cca3e" +content-hash = "ef513d79226c862daee0ed3bd16d812090441ea0cd41d93e82f185dbe765b0a6" @@ -3385,3 +3386 @@ libclang = [ -libcommon = [ - {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, -] +libcommon = [] diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 072f7354..9fee7083 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false } +libcommon = {path = "../../libs/libcommon", develop = true}
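With `develop = true`, Poetry installs libcommon as an editable path dependency, so changes under `libs/libcommon` take effect without rebuilding and re-pinning a wheel (which is why all the `dist/*.whl` artifacts above could be deleted). A quick sanity check from any dependent service's environment — a sketch, not part of the diff:

```python
import libcommon

# For an editable install this resolves into the source tree
# (e.g. .../libs/libcommon/src/libcommon/__init__.py), not site-packages.
print(libcommon.__file__)
```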
f320f88da09a51d9b71d3f1709e881e8fcece7af
Sylvain Lesage
2023-01-25T09:10:26
feat: 🎸 block more datasets in /parquet-and-dataset-info (#698)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 98fe47c2..4a8c5b93 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -216 +216 @@ parquetAndDatasetInfo: - blockedDatasets: "matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french,Graphcore/vqa,echarlaix/vqa,jimregan/clarinpl_studio,GEM/xsum,Tevatron/wikipedia-squad-corpus,mulcyber/europarl-mono,nateraw/wit,bigscience/P3,tau/mrqa,uva-irlab/trec-cast-2019-multi-turn,vblagoje/wikipedia_snippets_streamed,Tevatron/wikipedia-wq-corpus,malteos/paperswithcode-aspects,Samip/Scotch,iluvvatar/RuREBus,nateraw/quickdraw,tau/scrolls,qanastek/MASSIVE,TalTechNLP/VoxLingua107,shanya/crd3,HugoLaurencon/libri_light,jerpint/imagenette,Leyo/TGIF,DFKI-SLT/few-nerd,crystina-z/msmarco-passage-dl20,HuggingFaceM4/epic_kitchens_100,HuggingFaceM4/yttemporal180m,andreagasparini/librispeech_train_other_only,allenai/nllb,biglam/nls_chapbook_illustrations,winvoker/lvis,Lacito/pangloss,indonesian-nlp/librivox-indonesia,Graphcore/gqa-lxmert,nanom/splittedspanish3bw
c,cahya/librivox-indonesia,asapp/slue,sil-ai/audio-keyword-spotting,tner/wikiann,rogerdehe/xfund,arpelarpe/nota,mwhanna/ACT-Thor,sanchit-gandhi/librispeech_asr_clean,echarlaix/gqa-lxmert,shunk031/cocostuff,gigant/m-ailabs_speech_dataset_fr,jimregan/clarinpl_sejmsenat,1aurent/icdar-2011,marinone94/nst_no,jamescalam/unsplash-25k-images,stas/openwebtext-10k,florianbussmann/train_tickets-yu2020pick,benschill/brain-tumor-collection,imvladikon/paranames,PolyAI/evi,bengaliAI/cvbn,Sreyan88/librispeech_asr,superb,mozilla-foundation/common_voice_10_0,darkproger/librispeech_asr,kresnik/librispeech_asr_test,Lehrig/Monkey-Species-Collection,HuggingFaceM4/TGIF,crystina-z/miracl-bm25-negative,cats_vs_dogs,biglam/gallica_literary_fictions,common_language,competition_math,cornell_movie_dialog,evidence_infer_treatment,hebrew_projectbenyehuda,lj_speech,mc4,muchocine,opus_euconst,tab_fact,the_pile,tapaco,turkic_xwmt,web_nlg,vctk,mathaillah/BeritaHoaks-NonHoaks,universal_morphologies,LanceaKing/asvspoof2019,andreagasparini/librispeech_train_clean_only,nuprl/MultiPL-E,SLPL/naab-raw,mteb/results,SocialGrep/the-reddit-climate-change-dataset,bigscience-biomedical/anat_em,crystina-z/xor-tydi-corpus,qanastek/QUAERO,TomTBT/pmc_open_access_section,jamescalam/movielens-25m-ratings,HuggingFaceM4/charades,Tevatron/xor-tydi-corpus,khalidalt/tydiqa-primary,nvm472001/cvdataset-layoutlmv3,Lehrig/GTZAN-Collection,mteb/tatoeba-bitext-mining,sled-umich/Action-Effect,HamdiJr/Egyptian_hieroglyphs,joelito/lextreme,cooleel/xfund_de,oscar,mozilla-foundation/common_voice_7_0,KETI-AIR/vqa,Livingwithmachines/MapReader_Data_SIGSPATIAL_2022,NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English,miracl/miracl,Muennighoff/flores200,Murple/mmcrsc,mesolitica/dbp,CodedotAI/code_clippy,keshan/clean-si-mc4,yhavinga/ccmatrix,metashift,google/fleurs,HugoLaurencon/libri_light_bytes,biwi_kinect_head_pose,ami,bigscience-biomedical/ebm_pico,HuggingFaceM4/general-pmd-synthetic-testing,crystina-z/mmarco,robertmyers/pile_v2,bigbio/anat_em,biglam/early_printed_books_font_detection,nateraw/imagenet-sketch,jpwahle/dblp-discovery-dataset,andreagasparini/librispeech_test_only,crystina-z/mmarco-corpus,mozilla-foundation/common_voice_6_0,biglam/brill_iconclass,bigscience-biomedical/evidence_inference,HuggingFaceM4/cm4-synthetic-testing,SocialGrep/ten-million-reddit-answers,bnl_newspapers,multilingual_librispeech,openslr,GEM/BiSECT,Graphcore/gqa,SaulLu/Natural_Questions_HTML_reduced_all,ccdv/cnn_dailymail,mozilla-foundation/common_voice_1_0,huggan/anime-faces,Biomedical-TeMU/ProfNER_corpus_NER,MorVentura/TRBLLmaker,student/celebA,Rodion/uno_sustainable_development_goals,Nart/parallel-ab-ru,HuggingFaceM4/VQAv2,mesolitica/noisy-ms-en-augmentation,nateraw/rice-image-dataset,tensorcat/wikipedia-japanese,angelolab/ark_example,RAYZ/Mixed-Dia,ywchoi/mdpi_sept10,TomTBT/pmc_open_access_figure,society-ethics/lila_camera_traps,autoevaluator/shoes-vs-sandals-vs-boots,cjvt/slo_collocations,parambharat/mile_dataset,rossevine/tesis,ksaml/Stanford_dogs,nuprl/MultiPL-E-raw-data,ZihaoLin/zhlds,ACL-OCL/acl-anthology-corpus,mozilla-foundation/common_voice_2_0,Biomedical-TeMU/SPACCC_Sentence-Splitter,nateraw/rice-image-dataset-2,mesolitica/noisy-en-ms-augmentation,bigbio/ctebmsp,bigbio/distemist,nlphuji/vasr,parambharat/malayalam_asr_corpus,cjvt/sloleks,DavidVivancos/MindBigData2022_Imagenet_IN_Spct,KokeCacao/oracle,keremberke/nfl-object-detection,lafi23333/ds,Lykon/OnePiece,kaliansh/sdaia,sil-ai/audio-kw-in-context,andite/riyo-tag,ilhanemirhan/eee543,backslashlim/LoRA-Datasets,h
r16/Miwano-Rag,ccdv/mediasum,mozilla-foundation/common_voice_3_0,mozilla-foundation/common_voice_4_0,bigbio/ebm_pico,parambharat/kannada_asr_corpus,parambharat/telugu_asr_corpus,Abuelnour/json_1000_Scientific_Paper,reazon-research/reazonspeech,shunk031/livedoor-news-corpus,mesolitica/translated-SQUAD,SamAct/medium_cleaned,EfaceD/ElysiumInspirations,cahya/fleurs,guangguang/azukijpg,genjib/LAVISHData,rohitp1/librispeech_asr_clean,azraahmadi/autotrain-data-xraydatasetp2,HuggingFaceM4/COCO,bio-datasets/e3c,nateraw/auto-cats-and-dogs,keremberke/smoke-object-detection,ds4sd/DocLayNet,nlphuji/utk_faces,corentinm7/MyoQuant-SDH-Data,xglue,grasshoff/lhc_sents,HugoLaurencon/IIIT-5K,alkzar90/CC6204-Hackaton-Cub-Dataset,RaphaelOlivier/whisper_adversarial_examples,bruno-cotrim/arch-max,keshan/multispeaker-tts-sinhala,Tevatron/beir-corpus,fcakyon/gun-object-detection,ccdv/arxiv-summarization" + blockedDatasets: "matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french,Graphcore/vqa,echarlaix/vqa,jimregan/clarinpl_studio,
GEM/xsum,Tevatron/wikipedia-squad-corpus,mulcyber/europarl-mono,nateraw/wit,bigscience/P3,tau/mrqa,uva-irlab/trec-cast-2019-multi-turn,vblagoje/wikipedia_snippets_streamed,Tevatron/wikipedia-wq-corpus,malteos/paperswithcode-aspects,Samip/Scotch,iluvvatar/RuREBus,nateraw/quickdraw,tau/scrolls,qanastek/MASSIVE,TalTechNLP/VoxLingua107,shanya/crd3,HugoLaurencon/libri_light,jerpint/imagenette,Leyo/TGIF,DFKI-SLT/few-nerd,crystina-z/msmarco-passage-dl20,HuggingFaceM4/epic_kitchens_100,HuggingFaceM4/yttemporal180m,andreagasparini/librispeech_train_other_only,allenai/nllb,biglam/nls_chapbook_illustrations,winvoker/lvis,Lacito/pangloss,indonesian-nlp/librivox-indonesia,Graphcore/gqa-lxmert,nanom/splittedspanish3bwc,cahya/librivox-indonesia,asapp/slue,sil-ai/audio-keyword-spotting,tner/wikiann,rogerdehe/xfund,arpelarpe/nota,mwhanna/ACT-Thor,sanchit-gandhi/librispeech_asr_clean,echarlaix/gqa-lxmert,shunk031/cocostuff,gigant/m-ailabs_speech_dataset_fr,jimregan/clarinpl_sejmsenat,1aurent/icdar-2011,marinone94/nst_no,jamescalam/unsplash-25k-images,stas/openwebtext-10k,florianbussmann/train_tickets-yu2020pick,benschill/brain-tumor-collection,imvladikon/paranames,PolyAI/evi,bengaliAI/cvbn,Sreyan88/librispeech_asr,superb,mozilla-foundation/common_voice_10_0,darkproger/librispeech_asr,kresnik/librispeech_asr_test,Lehrig/Monkey-Species-Collection,HuggingFaceM4/TGIF,crystina-z/miracl-bm25-negative,cats_vs_dogs,biglam/gallica_literary_fictions,common_language,competition_math,cornell_movie_dialog,evidence_infer_treatment,hebrew_projectbenyehuda,lj_speech,mc4,muchocine,opus_euconst,tab_fact,the_pile,tapaco,turkic_xwmt,web_nlg,vctk,mathaillah/BeritaHoaks-NonHoaks,universal_morphologies,LanceaKing/asvspoof2019,andreagasparini/librispeech_train_clean_only,nuprl/MultiPL-E,SLPL/naab-raw,mteb/results,SocialGrep/the-reddit-climate-change-dataset,bigscience-biomedical/anat_em,crystina-z/xor-tydi-corpus,qanastek/QUAERO,TomTBT/pmc_open_access_section,jamescalam/movielens-25m-ratings,HuggingFaceM4/charades,Tevatron/xor-tydi-corpus,khalidalt/tydiqa-primary,nvm472001/cvdataset-layoutlmv3,Lehrig/GTZAN-Collection,mteb/tatoeba-bitext-mining,sled-umich/Action-Effect,HamdiJr/Egyptian_hieroglyphs,joelito/lextreme,cooleel/xfund_de,oscar,mozilla-foundation/common_voice_7_0,KETI-AIR/vqa,Livingwithmachines/MapReader_Data_SIGSPATIAL_2022,NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English,miracl/miracl,Muennighoff/flores200,Murple/mmcrsc,mesolitica/dbp,CodedotAI/code_clippy,keshan/clean-si-mc4,yhavinga/ccmatrix,metashift,google/fleurs,HugoLaurencon/libri_light_bytes,biwi_kinect_head_pose,ami,bigscience-biomedical/ebm_pico,HuggingFaceM4/general-pmd-synthetic-testing,crystina-z/mmarco,robertmyers/pile_v2,bigbio/anat_em,biglam/early_printed_books_font_detection,nateraw/imagenet-sketch,jpwahle/dblp-discovery-dataset,andreagasparini/librispeech_test_only,crystina-z/mmarco-corpus,mozilla-foundation/common_voice_6_0,biglam/brill_iconclass,bigscience-biomedical/evidence_inference,HuggingFaceM4/cm4-synthetic-testing,SocialGrep/ten-million-reddit-answers,bnl_newspapers,multilingual_librispeech,openslr,GEM/BiSECT,Graphcore/gqa,SaulLu/Natural_Questions_HTML_reduced_all,ccdv/cnn_dailymail,mozilla-foundation/common_voice_1_0,huggan/anime-faces,Biomedical-TeMU/ProfNER_corpus_NER,MorVentura/TRBLLmaker,student/celebA,Rodion/uno_sustainable_development_goals,Nart/parallel-ab-ru,HuggingFaceM4/VQAv2,mesolitica/noisy-ms-en-augmentation,nateraw/rice-image-dataset,tensorcat/wikipedia-japanese,angelolab/ark_example,RAYZ/Mixed-Dia,ywchoi/mdpi_sept10,TomTB
T/pmc_open_access_figure,society-ethics/lila_camera_traps,autoevaluator/shoes-vs-sandals-vs-boots,cjvt/slo_collocations,parambharat/mile_dataset,rossevine/tesis,ksaml/Stanford_dogs,nuprl/MultiPL-E-raw-data,ZihaoLin/zhlds,ACL-OCL/acl-anthology-corpus,mozilla-foundation/common_voice_2_0,Biomedical-TeMU/SPACCC_Sentence-Splitter,nateraw/rice-image-dataset-2,mesolitica/noisy-en-ms-augmentation,bigbio/ctebmsp,bigbio/distemist,nlphuji/vasr,parambharat/malayalam_asr_corpus,cjvt/sloleks,DavidVivancos/MindBigData2022_Imagenet_IN_Spct,KokeCacao/oracle,keremberke/nfl-object-detection,lafi23333/ds,Lykon/OnePiece,kaliansh/sdaia,sil-ai/audio-kw-in-context,andite/riyo-tag,ilhanemirhan/eee543,backslashlim/LoRA-Datasets,hr16/Miwano-Rag,ccdv/mediasum,mozilla-foundation/common_voice_3_0,mozilla-foundation/common_voice_4_0,bigbio/ebm_pico,parambharat/kannada_asr_corpus,parambharat/telugu_asr_corpus,Abuelnour/json_1000_Scientific_Paper,reazon-research/reazonspeech,shunk031/livedoor-news-corpus,mesolitica/translated-SQUAD,SamAct/medium_cleaned,EfaceD/ElysiumInspirations,cahya/fleurs,guangguang/azukijpg,genjib/LAVISHData,rohitp1/librispeech_asr_clean,azraahmadi/autotrain-data-xraydatasetp2,HuggingFaceM4/COCO,bio-datasets/e3c,nateraw/auto-cats-and-dogs,keremberke/smoke-object-detection,ds4sd/DocLayNet,nlphuji/utk_faces,corentinm7/MyoQuant-SDH-Data,xglue,grasshoff/lhc_sents,HugoLaurencon/IIIT-5K,alkzar90/CC6204-Hackaton-Cub-Dataset,RaphaelOlivier/whisper_adversarial_examples,bruno-cotrim/arch-max,keshan/multispeaker-tts-sinhala,Tevatron/beir-corpus,fcakyon/gun-object-detection,ccdv/arxiv-summarization,keremberke/protective-equipment-detection,mozilla-foundation/common_voice_5_0,nlphuji/winogavil,Poupou/Gitcoin-Grant-DataBuilder,orieg/elsevier-oa-cc-by,castorini/msmarco_v1_passage_doc2query-t5_expansions,inseq/divemt_attributions,crystina-z/msmarco-passage-dl19,mozilla-foundation/common_voice_5_1,matchbench/dbp15k-fr-en,keremberke/garbage-object-detection,crystina-z/no-nonself-mrtydi,ashraq/dhivehi-corpus,zyznull/dureader-retrieval-ranking,zyznull/msmarco-passage-corpus,zyznull/msmarco-passage-ranking,Tevatron/wikipedia-squad,Tevatron/wikipedia-trivia-corpus,NeuroSenko/senko_anime_full,plncmm/wl-disease,plncmm/wl-family-member"
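The `blockedDatasets` value is a single comma-separated string rendered into a `PARQUET_AND_DATASET_INFO_BLOCKED_DATASETS`-style environment variable. The worker-side consumption is not shown in this diff; a hypothetical sketch of the usual pattern, with an illustrative function name and sample values:

```python
def parse_blocked_datasets(raw: str) -> set[str]:
    """Split a comma-separated blocklist into a set of dataset names."""
    return {name.strip() for name in raw.split(",") if name.strip()}

blocked = parse_blocked_datasets("severo/wit,bigcode/the-stack,allenai/c4")
assert "bigcode/the-stack" in blocked
```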
059cb3029567c320f363263856f1725491bec665
Sylvain Lesage
2023-01-25T09:05:46
feat: 🎸 reduce logs level from DEBUG to INFO (#697)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 03fb282e..98fe47c2 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -73 +73 @@ common: - logLevel: "DEBUG" + logLevel: "INFO"
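The chart change only flips the rendered value; the services parse it from the environment at startup. A hypothetical sketch of that wiring using `environs`, which the codebase already depends on — the `COMMON_LOG_LEVEL` variable name is an assumption here:

```python
import logging
from environs import Env

env = Env()
# environs turns "INFO", "DEBUG", ... into the matching logging constant
log_level = env.log_level("COMMON_LOG_LEVEL", logging.INFO)
logging.basicConfig(level=log_level)
logging.getLogger(__name__).info("logging configured at %s", logging.getLevelName(log_level))
```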
c40fe808708feff76d1adb8936c5ff0b1a0b521c
Sylvain Lesage
2023-01-23T23:09:59
Add a new route: /cache-reports-with-content (#696)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 1a5df9db..509e1adb 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-9ab3e6e" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-0abd269" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-9ab3e6e", - "api": "huggingface/datasets-server-services-api:sha-9ab3e6e" + "admin": "huggingface/datasets-server-services-admin:sha-0abd269", + "api": "huggingface/datasets-server-services-api:sha-0abd269" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-41c7e41" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-0abd269" diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 73365ec5..03fb282e 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -139,0 +140,2 @@ admin: + # Number of reports in /cache-reports-with-content/... endpoints + cacheReportsWithContentNumResults: 100 @@ -199 +201 @@ firstRows: - maxJobsPerNamespace: 10 + maxJobsPerNamespace: 4 @@ -203 +205 @@ firstRows: - replicas: 80 + replicas: 24 @@ -225 +227 @@ parquetAndDatasetInfo: - replicas: 55 + replicas: 24 diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl index ff5e60eb..808c63f8 100644 --- a/chart/templates/services/admin/_container.tpl +++ b/chart/templates/services/admin/_container.tpl @@ -17,0 +18,2 @@ + - name: ADMIN_CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS + value: {{ .Values.admin.cacheReportsWithContentNumResults | quote }} diff --git a/chart/values.yaml b/chart/values.yaml index 313687f7..9c6e2673 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -150,0 +151,2 @@ admin: + # Number of reports in /cache-reports-with-content/... 
endpoints + cacheReportsWithContentNumResults: 100 diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index 132432cf..dc2e516b 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -307 +307 @@ name = "libcommon" -version = "0.6.7" +version = "0.6.8" @@ -325 +325 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" @@ -880 +880 @@ python-versions = "3.9.15" -content-hash = "9d0680f7c12a357489e3ebf925e9e035430cd5ceebded0f0288ca82a461d0315" +content-hash = "9cc914c0fdedb940ea8057a97966e733540449cf038518036ef5708de22d2a2e" @@ -1121 +1121 @@ libcommon = [ - {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, + {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index 8832f87f..e4a21a0e 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false } diff --git a/libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl new file mode 100644 index 00000000..b0751216 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.8.tar.gz b/libs/libcommon/dist/libcommon-0.6.8.tar.gz new file mode 100644 index 00000000..a63f5208 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.8.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 728c9043..2ac13c07 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.6.7" +version = "0.6.8" diff --git a/libs/libcommon/src/libcommon/queue.py b/libs/libcommon/src/libcommon/queue.py index bd2e0e59..4420bf50 100644 --- a/libs/libcommon/src/libcommon/queue.py +++ b/libs/libcommon/src/libcommon/queue.py @@ -127 +127 @@ class Job(Document): - ("status", "type", "created_at", "namespace"), + ("status", "type", "created_at", "namespace", "unicity_id"), diff --git a/libs/libcommon/src/libcommon/simple_cache.py b/libs/libcommon/src/libcommon/simple_cache.py index 09723ee0..f103e286 100644 --- a/libs/libcommon/src/libcommon/simple_cache.py +++ b/libs/libcommon/src/libcommon/simple_cache.py @@ -254 +254 @@ def get_responses_count_by_kind_status_and_error_code() -> List[CountEntry]: -class ResponseReport(TypedDict): +class CacheReport(TypedDict): @@ -265,2 +265,2 @@ class ResponseReport(TypedDict): -class CacheReport(TypedDict): - cache_reports: List[ResponseReport] +class CacheReportsPage(TypedDict): + cache_reports: List[CacheReport] @@ -278 +278 @@ class InvalidLimit(Exception): -def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheReport: +def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheReportsPage: @@ -288 +288 @@ def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheRepo - An opaque string value representing a pointer to a specific FirstRowsResponse item in the dataset. 
The + An opaque string value representing a pointer to a specific CachedResponse item in the dataset. The @@ -294 +294 @@ def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheRepo - [`CacheReport`]: A dict with the list of reports and the next cursor. The next cursor is + [`CacheReportsPage`]: A dict with the list of reports and the next cursor. The next cursor is @@ -345,0 +346,85 @@ def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheRepo +class CacheReportWithContent(CacheReport): + content: Mapping[str, Any] + details: Mapping[str, Any] + updated_at: datetime + + +class CacheReportsWithContentPage(TypedDict): + cache_reports_with_content: List[CacheReportWithContent] + next_cursor: str + + +def get_cache_reports_with_content(kind: str, cursor: Optional[str], limit: int) -> CacheReportsWithContentPage: + """ + Get a list of the cache report with content, along with the next cursor. + See https://solovyov.net/blog/2020/api-pagination-design/. + + The cache reports contain all the fields of the object, including the "content" field. + + Args: + kind (str): the kind of the cache entries + cursor (`str`): + An opaque string value representing a pointer to a specific CachedResponse item in the dataset. The + server returns results after the given pointer. + An empty string means to start from the beginning. + limit (strictly positive `int`): + The maximum number of results. + Returns: + [`CacheReportsWithContentPage`]: A dict with the list of reports and the next cursor. The next cursor is + an empty string if there are no more items to be fetched. + <Tip> + Raises the following errors: + - [`~libcommon.simple_cache.InvalidCursor`] + If the cursor is invalid. + - [`~libcommon.simple_cache.InvalidLimit`] + If the limit is an invalid number. 
+ </Tip> + """ + if not cursor: + queryset = CachedResponse.objects(kind=kind) + else: + try: + queryset = CachedResponse.objects(kind=kind, id__gt=ObjectId(cursor)) + except InvalidId as err: + raise InvalidCursor("Invalid cursor.") from err + if limit <= 0: + raise InvalidLimit("Invalid limit.") + objects = list( + queryset.order_by("+id") + .only( + "id", + "kind", + "dataset", + "config", + "split", + "http_status", + "error_code", + "content", + "worker_version", + "dataset_git_revision", + "details", + "updated_at", + ) + .limit(limit) + ) + return { + "cache_reports_with_content": [ + { + "kind": kind, + "dataset": object.dataset, + "config": object.config, + "split": object.split, + "http_status": object.http_status.value, + "error_code": object.error_code, + "content": object.content, + "worker_version": object.worker_version, + "dataset_git_revision": object.dataset_git_revision, + "details": object.details, + "updated_at": object.updated_at, + } + for object in objects + ], + "next_cursor": "" if len(objects) < limit else str(objects[-1].id), + } + + diff --git a/libs/libcommon/tests/test_simple_cache.py b/libs/libcommon/tests/test_simple_cache.py index 012d6326..71bf609d 100644 --- a/libs/libcommon/tests/test_simple_cache.py +++ b/libs/libcommon/tests/test_simple_cache.py @@ -3,0 +4 @@ +from datetime import datetime @@ -21,0 +23 @@ from libcommon.simple_cache import ( + get_cache_reports_with_content, @@ -381,0 +384,4 @@ def test_get_cache_reports() -> None: + assert get_cache_reports_with_content(kind=kind, cursor="", limit=2) == { + "cache_reports_with_content": [], + "next_cursor": "", + } @@ -468 +473,0 @@ def test_get_cache_reports() -> None: - @@ -485,0 +491,57 @@ def test_get_cache_reports() -> None: + response_with_content = get_cache_reports_with_content(kind=kind, cursor="", limit=2) + # redact the response to make it simpler to compare with the expected + REDACTED_DATE = datetime(2020, 1, 1, 0, 0, 0) + for c in response_with_content["cache_reports_with_content"]: + c["updated_at"] = REDACTED_DATE + assert response_with_content["cache_reports_with_content"] == [ + { + "kind": kind, + "dataset": dataset_a, + "config": None, + "split": None, + "http_status": http_status_a.value, + "error_code": None, + "content": content_a, + "worker_version": None, + "dataset_git_revision": None, + "details": {}, + "updated_at": REDACTED_DATE, + }, + { + "kind": kind, + "dataset": dataset_b, + "config": config_b, + "split": None, + "http_status": http_status_b.value, + "error_code": error_code_b, + "content": content_b, + "worker_version": worker_version_b, + "dataset_git_revision": dataset_git_revision_b, + "details": details_b, + "updated_at": REDACTED_DATE, + }, + ] + assert response_with_content["next_cursor"] != "" + next_cursor = response_with_content["next_cursor"] + response_with_content = get_cache_reports_with_content(kind=kind, cursor=next_cursor, limit=2) + for c in response_with_content["cache_reports_with_content"]: + c["updated_at"] = REDACTED_DATE + assert response_with_content == { + "cache_reports_with_content": [ + { + "kind": kind, + "dataset": dataset_c, + "config": config_c, + "split": split_c, + "http_status": http_status_c.value, + "error_code": error_code_c, + "content": content_c, + "worker_version": None, + "dataset_git_revision": None, + "details": details_c, + "updated_at": REDACTED_DATE, + }, + ], + "next_cursor": "", + } + diff --git a/services/admin/README.md b/services/admin/README.md index 72a96815..cd84cf2b 100644 --- a/services/admin/README.md +++ 
b/services/admin/README.md @@ -14,0 +15 @@ Set environment variables to configure the application (`ADMIN_` prefix): +- `ADMIN_CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS`: the number of results in /cache-reports-with-content/... endpoints. Defaults to `100`. @@ -40,0 +42 @@ The admin service provides endpoints: +- `/cache-reports-with-content/{processing_step}`: give detailed reports on the content of the cache for a processing step, including the content itself, which can be heavy diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index c95283f0..b54b9aa4 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -354 +354 @@ name = "libcommon" -version = "0.6.7" +version = "0.6.8" @@ -372 +372 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" @@ -995 +995 @@ python-versions = "3.9.15" -content-hash = "b6b99db1d8a2bfabdfff688753c5845fd48ec28f11b47a9ed9bae2f5d730cccd" +content-hash = "ca56fd792b28501ba816d6bc44df28efacaf8ea43839c77d39c920c3c0e8913a" @@ -1252 +1252 @@ libcommon = [ - {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, + {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index dc9fabe8..b29afef4 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 7be2df98..6a7665bd 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -14,0 +15,3 @@ from admin.routes.cache_reports import create_cache_reports_endpoint +from admin.routes.cache_reports_with_content import ( + create_cache_reports_with_content_endpoint, +) @@ -77,0 +81,13 @@ def create_app() -> Starlette: + + [ + Route( + f"/cache-reports-with-content{processing_step.endpoint}", + endpoint=create_cache_reports_with_content_endpoint( + processing_step=processing_step, + cache_reports_with_content_num_results=app_config.admin.cache_reports_with_content_num_results, + max_age=app_config.admin.max_age, + external_auth_url=app_config.external_auth_url, + organization=app_config.admin.hf_organization, + ), + ) + for processing_step in processing_steps + ] diff --git a/services/admin/src/admin/config.py b/services/admin/src/admin/config.py index d2410b3d..1d4f271a 100644 --- a/services/admin/src/admin/config.py +++ b/services/admin/src/admin/config.py @@ -38,0 +39 @@ ADMIN_CACHE_REPORTS_NUM_RESULTS = 100 +ADMIN_CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS = 100 @@ -46,0 +48 @@ class AdminConfig: + cache_reports_with_content_num_results: int = ADMIN_CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS @@ -58,0 +61,3 @@ class AdminConfig: + cache_reports_with_content_num_results=env.int( + name="CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS", default=ADMIN_CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS + ), diff --git a/services/admin/src/admin/routes/cache_reports_with_content.py b/services/admin/src/admin/routes/cache_reports_with_content.py new file mode 100644 index 00000000..45798df8 --- /dev/null +++ 
b/services/admin/src/admin/routes/cache_reports_with_content.py @@ -0,0 +1,58 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +from typing import Optional + +from libcommon.processing_graph import ProcessingStep +from libcommon.simple_cache import ( + InvalidCursor, + InvalidLimit, + get_cache_reports_with_content, +) +from starlette.requests import Request +from starlette.responses import Response + +from admin.authentication import auth_check +from admin.utils import ( + AdminCustomError, + Endpoint, + InvalidParameterError, + UnexpectedError, + get_json_admin_error_response, + get_json_ok_response, +) + + +def create_cache_reports_with_content_endpoint( + processing_step: ProcessingStep, + cache_reports_with_content_num_results: int, + max_age: int, + external_auth_url: Optional[str] = None, + organization: Optional[str] = None, +) -> Endpoint: + async def cache_reports_with_content_endpoint(request: Request) -> Response: + try: + cursor = request.query_params.get("cursor") or "" + logging.info(f"Cache reports with content for {processing_step.cache_kind}, cursor={cursor}") + # if auth_check fails, it will raise an exception that will be caught below + auth_check(external_auth_url=external_auth_url, request=request, organization=organization) + try: + return get_json_ok_response( + get_cache_reports_with_content( + kind=processing_step.cache_kind, cursor=cursor, limit=cache_reports_with_content_num_results + ), + max_age=max_age, + ) + except InvalidCursor as e: + raise InvalidParameterError("Invalid cursor.") from e + except InvalidLimit as e: + raise UnexpectedError( + "Invalid limit. CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS must be a strictly positive integer." + ) from e + except AdminCustomError as e: + return get_json_admin_error_response(e, max_age=max_age) + except Exception: + return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=max_age) + + return cache_reports_with_content_endpoint diff --git a/services/admin/tests/test_app.py b/services/admin/tests/test_app.py index 17cf7075..9eb72b76 100644 --- a/services/admin/tests/test_app.py +++ b/services/admin/tests/test_app.py @@ -106,0 +107,27 @@ def test_cache_reports( + + [email protected]( + "cursor,http_status,error_code", + [ + (None, 200, None), + ("", 200, None), + ("invalid cursor", 422, "InvalidParameter"), + ], +) +def test_cache_reports_with_content( + client: TestClient, + processing_steps: List[ProcessingStep], + cursor: Optional[str], + http_status: int, + error_code: Optional[str], +) -> None: + path = processing_steps[0].endpoint + cursor_str = f"?cursor={cursor}" if cursor else "" + response = client.get(f"/cache-reports-with-content{path}{cursor_str}") + assert response.status_code == http_status + if error_code: + assert isinstance(response.json()["error"], str) + assert response.headers["X-Error-Code"] == error_code + else: + assert response.json() == {"cache_reports_with_content": [], "next_cursor": ""} + assert "X-Error-Code" not in response.headers diff --git a/services/api/poetry.lock b/services/api/poetry.lock index c117b691..d48e8770 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -370 +370 @@ name = "libcommon" -version = "0.6.7" +version = "0.6.8" @@ -388 +388 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" @@ -1029 +1029 @@ python-versions = "3.9.15" -content-hash = 
"bee379a601705af45f72bc995b508b361f473c1a9351f3aaa5028a186aa4a864" +content-hash = "9fc9f9e46fd5dc0a83603f51225495b7d9b86ff64f6b1481d379a498fa44d522" @@ -1290 +1290 @@ libcommon = [ - {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, + {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index cc6e5cfc..880a093e 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false } diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index e64091ca..604c441b 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -31,0 +32 @@ services: + ADMIN_CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS: ${ADMIN_CACHE_REPORTS_WITH_CONTENT_NUM_RESULTS-100} diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index f0c3721a..cb5ced04 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -876 +876 @@ name = "libcommon" -version = "0.6.7" +version = "0.6.8" @@ -894 +894 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl" @@ -2482 +2482 @@ python-versions = "3.9.15" -content-hash = "65350a7746bea0eb1f159f258c9df6d2207c871cb9eb76c9be5356e2da033ed3" +content-hash = "9f6679bc511c61a3619696cafabd8c404ca8cecee25d1e23ea7903daea4cca3e" @@ -3386 +3386 @@ libcommon = [ - {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, + {file = "libcommon-0.6.8-py3-none-any.whl", hash = "sha256:28ae018d0416b1ebdbb8fc238157eb01f84608cd3880d23e9a7eef1f73c46908"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 588f2d0b..072f7354 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.8-py3-none-any.whl", develop = false }
8c837fb3bcedc5ffd1783a2be73304e2c8322543
Sylvain Lesage
2023-01-23T17:40:18
feat: 🎸 launch children jobs even when skipped (#695)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 85a0a0f8..1a5df9db 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-9ab3e6e" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-41c7e41" diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py index a1f2db22..16ebe1d4 100644 --- a/workers/datasets_based/src/datasets_based/worker.py +++ b/workers/datasets_based/src/datasets_based/worker.py @@ -16,0 +17 @@ from libcommon.simple_cache import ( + get_response, @@ -185,6 +186,3 @@ class Worker(ABC): - if self.should_skip_job(): - return Status.SKIPPED - elif self.process(): - return Status.SUCCESS - else: - return Status.ERROR + result: Literal[Status.SUCCESS, Status.ERROR, Status.SKIPPED] = ( + Status.SKIPPED if self.should_skip_job() else Status.SUCCESS if self.process() else Status.ERROR + ) @@ -193 +191,3 @@ class Worker(ABC): - return Status.ERROR + result = Status.ERROR + self.create_children_jobs() + return result @@ -265 +264,0 @@ class Worker(ABC): - self.create_children_jobs(self.get_new_splits(content)) @@ -321,5 +320 @@ class Worker(ABC): - Can be empty.""" - return set() - - def create_children_jobs(self, new_split_full_names: set[SplitFullName]) -> None: - """Create children jobs for the current job. + Can be empty. @@ -328,2 +323,3 @@ class Worker(ABC): - new_split_full_names (:obj:`set[SplitFullName]`): the set of new splits, from the content created by the - compute. Can be empty. + content (:obj:`Mapping[str, Any]`): the content created by the compute. + Returns: + :obj:`set[SplitFullName]`: the set of new splits full names. @@ -331,4 +327,15 @@ class Worker(ABC): - for processing_step in self.processing_step.children: - if processing_step.input_type == "dataset": - Queue(type=processing_step.job_type).upsert_job( - dataset=self.dataset, config=None, split=None, force=self.force + return set() + + def create_children_jobs(self) -> None: + """Create children jobs for the current job.""" + dataset_children = [c for c in self.processing_step.children if c.input_type == "dataset"] + for processing_step in dataset_children: + Queue(type=processing_step.job_type).upsert_job( + dataset=self.dataset, config=None, split=None, force=self.force + ) + + split_children = [c for c in self.processing_step.children if c.input_type == "split"] + if len(split_children) > 0: + try: + response_in_cache = get_response( + kind=self.processing_step.cache_kind, dataset=self.dataset, config=self.config, split=self.split @@ -336 +343,8 @@ class Worker(ABC): - elif processing_step.input_type == "split": + except Exception: + # if the response is not in the cache, we don't create the children jobs + return + if response_in_cache["http_status"] != HTTPStatus.OK: + # if the response is not valid, we don't create the children jobs + return + new_split_full_names: set[SplitFullName] = self.get_new_splits(response_in_cache["content"]) + for processing_step in split_children: diff --git a/workers/datasets_based/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py index 200b2543..f765d39c 100644 --- a/workers/datasets_based/tests/test_worker.py +++ b/workers/datasets_based/tests/test_worker.py @@ -174 +174 @@ def test_create_children_jobs() -> None: - worker.process() + worker.run()
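The behavioral change of this commit is easiest to see in Worker.run(): the job result is computed first, and create_children_jobs() now runs in every case, including SKIPPED, instead of only from a successful process(). A runnable stand-in that condenses the new control flow (Status and the stub methods below are stand-ins, not the real libcommon classes):

    from enum import Enum

    class Status(Enum):  # stand-in for libcommon.queue.Status
        SUCCESS = "success"
        ERROR = "error"
        SKIPPED = "skipped"

    class WorkerSketch:
        """Condensed paraphrase of the new Worker.run() ordering from the diff."""

        def should_skip_job(self) -> bool:
            return True  # stub: pretend the cached response is still valid

        def process(self) -> bool:
            return True  # stub

        def create_children_jobs(self) -> None:
            print("children jobs created")

        def run(self) -> Status:
            try:
                result = (
                    Status.SKIPPED if self.should_skip_job()
                    else Status.SUCCESS if self.process()
                    else Status.ERROR
                )
            except Exception:
                result = Status.ERROR
            # children jobs are created even when the job was skipped or failed;
            # the real create_children_jobs() checks the cache and returns early
            # unless an HTTP 200 response exists for this step.
            self.create_children_jobs()
            return result

    print(WorkerSketch().run())  # Status.SKIPPED, yet the children jobs were still created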
984f0b59c5d220d1341df89a42aea22c339cde52
Sylvain Lesage
2023-01-23T15:17:22
feat: 🎸 replace Queue.add_job with Queue.upsert_job (#694)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 237246b9..85a0a0f8 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-61dda8c" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-9ab3e6e" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-61dda8c", - "api": "huggingface/datasets-server-services-api:sha-61dda8c" + "admin": "huggingface/datasets-server-services-admin:sha-9ab3e6e", + "api": "huggingface/datasets-server-services-api:sha-9ab3e6e" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-47d3297" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-9ab3e6e" diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index 708b5d7e..132432cf 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -117,11 +116,0 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - @@ -318 +307 @@ name = "libcommon" -version = "0.6.6" +version = "0.6.7" @@ -336 +325 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" @@ -345,0 +335,21 @@ python-versions = "*" +[[package]] +name = "markdown-it-py" +version = "2.1.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark (>=3.2,<4.0)"] +code-style = ["pre-commit (==2.6)"] +compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistletoe (>=0.8.1,<0.9.0)", "mistune (>=2.0.2,<2.1.0)", "panflute (>=2.1.3,<2.2.0)"] +linkify = ["linkify-it-py (>=1.0,<2.0)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + @@ -370,0 +381,8 @@ python-versions = "*" +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +category = "dev" +optional = false +python-versions = ">=3.7" + @@ -487 +505 @@ name = "pip-audit" -version = "2.4.13" +version = "2.4.14" @@ -505,2 +523,3 @@ toml = ">=0.10" -dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "pdoc3", "ruff (<0.0.218)", "types-html5lib", "types-requests", "types-toml"] +dev = ["build", "bump (>=1.3.2)", "pip-audit[doc,lint,test]"] +doc = ["pdoc"] +lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "ruff (<0.0.228)", "types-html5lib", "types-requests", "types-toml"] @@ -668 +687 @@ name = "python-dotenv" -version = "0.21.0" +version = "0.21.1" @@ -719 +738 @@ name = "rich" -version = "13.1.0" +version = "13.2.0" @@ -726 +745 @@ python-versions = ">=3.7.0" -commonmark = ">=0.9.0,<0.10.0" +markdown-it-py = ">=2.1.0,<3.0.0" @@ -734 +753 @@ name = "setuptools" -version = "66.0.0" +version = "66.1.1" @@ -861 +880 @@ python-versions = "3.9.15" 
-content-hash = "63b2a3d951b41381ea9d1592aa7dbb8ade73969bb73647aa5998222a8d29d79e" +content-hash = "9d0680f7c12a357489e3ebf925e9e035430cd5ceebded0f0288ca82a461d0315" @@ -996,4 +1014,0 @@ colorama = [ -commonmark = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] @@ -1106 +1121 @@ libcommon = [ - {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, + {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, @@ -1111,0 +1127,4 @@ lockfile = [ +markdown-it-py = [ + {file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"}, + {file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"}, +] @@ -1119,0 +1139,4 @@ mccabe = [ +mdurl = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] @@ -1281,2 +1304,2 @@ pip-audit = [ - {file = "pip_audit-2.4.13-py3-none-any.whl", hash = "sha256:3ea2fc5c70bf335362d4d81a7bd1084787efac34929e422f79bd8cf8804da2e2"}, - {file = "pip_audit-2.4.13.tar.gz", hash = "sha256:e0c9fe070a16aefdbb9c4d43df6a0183bc951375a293f58264c5e80b5edb57d7"}, + {file = "pip_audit-2.4.14-py3-none-any.whl", hash = "sha256:f9632b9f67bcf3fda78ef7651a03c8ed926d1eaeda474dcbdcb26a5518dd6ffc"}, + {file = "pip_audit-2.4.14.tar.gz", hash = "sha256:1259629fe24302e257052e977146f56bebf34927740d5efd184aaafa3b1b3b38"}, @@ -1452,2 +1475,2 @@ python-dotenv = [ - {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, - {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, + {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, + {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, @@ -1506,2 +1529,2 @@ rich = [ - {file = "rich-13.1.0-py3-none-any.whl", hash = "sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"}, - {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"}, + {file = "rich-13.2.0-py3-none-any.whl", hash = "sha256:7c963f0d03819221e9ac561e1bc866e3f95a02248c1234daa48954e6d381c003"}, + {file = "rich-13.2.0.tar.gz", hash = "sha256:f1a00cdd3eebf999a15d85ec498bfe0b1a77efe9b34f645768a54132ef444ac5"}, @@ -1510,2 +1533,2 @@ setuptools = [ - {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"}, - {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"}, + {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"}, + {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"}, diff --git 
a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index 962e5974..8832f87f 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } diff --git a/libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl new file mode 100644 index 00000000..64a5f2f5 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.7.tar.gz b/libs/libcommon/dist/libcommon-0.6.7.tar.gz new file mode 100644 index 00000000..f9b60e78 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.7.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index dc8c4404..728c9043 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.6.6" +version = "0.6.7" diff --git a/libs/libcommon/src/libcommon/operations.py b/libs/libcommon/src/libcommon/operations.py index effe1338..8fcf0949 100644 --- a/libs/libcommon/src/libcommon/operations.py +++ b/libs/libcommon/src/libcommon/operations.py @@ -48 +48 @@ def update_dataset( - Queue(type=init_processing_step.job_type).add_job(dataset=dataset, force=force) + Queue(type=init_processing_step.job_type).upsert_job(dataset=dataset, force=force) diff --git a/libs/libcommon/src/libcommon/queue.py b/libs/libcommon/src/libcommon/queue.py index 39bbbf73..bd2e0e59 100644 --- a/libs/libcommon/src/libcommon/queue.py +++ b/libs/libcommon/src/libcommon/queue.py @@ -190 +190 @@ class Queue: - def add_job( + def _add_job( @@ -194,0 +195,2 @@ class Queue: + This method should not be called directly. Use `upsert_job` instead. + @@ -214,0 +217,21 @@ class Queue: + def upsert_job( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> Job: + """Add, or update, a job to the queue in the waiting state. + + If jobs already exist with the same parameters in the waiting state, they are cancelled and replaced by a new + one. + + Args: + dataset (`str`): The dataset on which to apply the job. + config (`str`, optional): The config on which to apply the job. + split (`str`, optional): The split on which to apply the job. + force (`bool`, optional): If True, the job SHOULD not be skipped. Defaults to False.
+ + Returns: the job + """ + Job.objects(type=self.type, dataset=dataset, config=config, split=split, status=Status.WAITING).update( + finished_at=get_datetime(), status=Status.CANCELLED + ) + return self._add_job(dataset=dataset, config=config, split=split, force=force) + @@ -359 +382 @@ class Queue: - self.add_job(dataset=job.dataset, config=job.config, split=job.split) + self.upsert_job(dataset=job.dataset, config=job.config, split=job.split) diff --git a/libs/libcommon/tests/test_queue.py b/libs/libcommon/tests/test_queue.py index 90280c70..67b72199 100644 --- a/libs/libcommon/tests/test_queue.py +++ b/libs/libcommon/tests/test_queue.py @@ -18 +18 @@ def clean_mongo_database(queue_config: QueueConfig) -> None: -def test_add_job() -> None: +def test__add_job() -> None: @@ -24 +24 @@ def test_add_job() -> None: - queue.add_job(dataset=test_dataset, force=True) + queue._add_job(dataset=test_dataset, force=True) @@ -26 +26 @@ def test_add_job() -> None: - queue.add_job(dataset=test_dataset) + queue._add_job(dataset=test_dataset) @@ -38 +38 @@ def test_add_job() -> None: - queue.add_job(dataset=test_dataset, force=True) + queue._add_job(dataset=test_dataset, force=True) @@ -64,0 +65,38 @@ def test_add_job() -> None: +def test_upsert_job() -> None: + test_type = "test_type" + test_dataset = "test_dataset" + # get the queue + queue = Queue(test_type) + # upsert a job + queue.upsert_job(dataset=test_dataset, force=True) + # a second call creates a second waiting job, and the first one is cancelled + queue.upsert_job(dataset=test_dataset) + assert queue.is_job_in_process(dataset=test_dataset) is True + # get and start the last job + job_info = queue.start_job() + assert job_info["type"] == test_type + assert job_info["dataset"] == test_dataset + assert job_info["config"] is None + assert job_info["split"] is None + assert job_info["force"] is False + assert queue.is_job_in_process(dataset=test_dataset) is True + # adding the job while the first one has not finished yet adds a new waiting job + queue.upsert_job(dataset=test_dataset, force=True) + with pytest.raises(EmptyQueueError): + # but: it's not possible to start two jobs with the same arguments + queue.start_job() + # finish the first job + queue.finish_job(job_id=job_info["job_id"], finished_status=Status.SUCCESS) + # the queue is not empty + assert queue.is_job_in_process(dataset=test_dataset) is True + # process the second job + job_info = queue.start_job() + assert job_info["force"] is True + queue.finish_job(job_id=job_info["job_id"], finished_status=Status.SUCCESS) + # the queue is empty + assert queue.is_job_in_process(dataset=test_dataset) is False + with pytest.raises(EmptyQueueError): + # an error is raised if we try to start a job + queue.start_job() + + @@ -74,8 +112,8 @@ def test_priority_to_non_started_datasets() -> None: - queue.add_job(dataset="dataset1", config="config", split="split1") - queue.add_job(dataset="dataset1", config="config", split="split1") - queue.add_job(dataset="dataset1/dataset", config="config", split="split1") - queue.add_job(dataset="dataset1", config="config", split="split2") - queue.add_job(dataset="dataset2", config="config", split="split1") - queue.add_job(dataset="dataset2", config="config", split="split2") - queue.add_job(dataset="dataset3", config="config", split="split1") - check_job(queue=queue, expected_dataset="dataset1", expected_split="split1") + queue.upsert_job(dataset="dataset1", config="config", split="split1") + queue.upsert_job(dataset="dataset1/dataset", config="config", 
split="split1") + queue.upsert_job(dataset="dataset1", config="config", split="split2") + queue.upsert_job(dataset="dataset2", config="config", split="split1") + queue.upsert_job(dataset="dataset2", config="config", split="split2") + queue.upsert_job(dataset="dataset3", config="config", split="split1") + queue.upsert_job(dataset="dataset1", config="config", split="split1") + check_job(queue=queue, expected_dataset="dataset1/dataset", expected_split="split1") @@ -84,2 +121,0 @@ def test_priority_to_non_started_datasets() -> None: - check_job(queue=queue, expected_dataset="dataset1/dataset", expected_split="split1") - check_job(queue=queue, expected_dataset="dataset2", expected_split="split2") @@ -86,0 +123,2 @@ def test_priority_to_non_started_datasets() -> None: + check_job(queue=queue, expected_dataset="dataset2", expected_split="split2") + check_job(queue=queue, expected_dataset="dataset1", expected_split="split1") @@ -88,3 +125,0 @@ def test_priority_to_non_started_datasets() -> None: - # raises even if there is still a waiting job - # (dataset="dataset1", config="config", split="split1") - # because a job with the same arguments is already started @@ -100 +135 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - queue.add_job(dataset=test_dataset, config=test_config, split="split1") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split1") @@ -102,2 +137,2 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - queue.add_job(dataset=test_dataset, config=test_config, split="split2") - queue.add_job(dataset=test_dataset, config=test_config, split="split3") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split2") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split3") @@ -138 +173 @@ def test_count_by_status() -> None: - queue.add_job(dataset=test_dataset) + queue.upsert_job(dataset=test_dataset) @@ -143 +178 @@ def test_count_by_status() -> None: - queue_other.add_job(dataset=test_dataset) + queue_other.upsert_job(dataset=test_dataset) @@ -154,5 +189,5 @@ def test_get_total_duration_per_dataset() -> None: - queue.add_job(dataset=test_dataset, config=test_config, split="split1") - queue.add_job(dataset=test_dataset, config=test_config, split="split2") - queue.add_job(dataset=test_dataset, config=test_config, split="split3") - queue.add_job(dataset=test_dataset, config=test_config, split="split4") - queue.add_job(dataset=test_dataset, config=test_config, split="split5") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split1") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split2") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split3") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split4") + queue.upsert_job(dataset=test_dataset, config=test_config, split="split5") diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index dab64664..c95283f0 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -354 +354 @@ name = "libcommon" -version = "0.6.6" +version = "0.6.7" @@ -372 +372 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" @@ -995 +995 @@ python-versions = "3.9.15" -content-hash = "93227842402cf05aad83e31f6aa4a55804a8073f25a214ac34d13c818e84ef0b" +content-hash = "b6b99db1d8a2bfabdfff688753c5845fd48ec28f11b47a9ed9bae2f5d730cccd" @@ -1252 +1252 @@ 
libcommon = [ - {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, + {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 41911fc1..dc9fabe8 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/routes/force_refresh.py b/services/admin/src/admin/routes/force_refresh.py index 134e0391..94d37939 100644 --- a/services/admin/src/admin/routes/force_refresh.py +++ b/services/admin/src/admin/routes/force_refresh.py @@ -52 +52 @@ def create_force_refresh_endpoint( - Queue(type=processing_step.job_type).add_job(dataset=dataset, config=config, split=split, force=True) + Queue(type=processing_step.job_type).upsert_job(dataset=dataset, config=config, split=split, force=True) diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 51a415e9..c117b691 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -370 +370 @@ name = "libcommon" -version = "0.6.6" +version = "0.6.7" @@ -388 +388 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" @@ -1029 +1029 @@ python-versions = "3.9.15" -content-hash = "d87f7385b9af864a5f8846bab395f89f25d74976984bdeafac2e83911524cc66" +content-hash = "bee379a601705af45f72bc995b508b361f473c1a9351f3aaa5028a186aa4a864" @@ -1290 +1290 @@ libcommon = [ - {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, + {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index d3fe8d38..cc6e5cfc 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index b756355b..f0c3721a 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -876 +876 @@ name = "libcommon" -version = "0.6.6" +version = "0.6.7" @@ -894 +894 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl" @@ -2482 +2482 @@ python-versions = "3.9.15" -content-hash = "7be4e8e664880da11c0b018fc1902c413ecafbf410f5513c6bae66e85e631164" +content-hash = "65350a7746bea0eb1f159f258c9df6d2207c871cb9eb76c9be5356e2da033ed3" @@ -3386 +3386 @@ libcommon = [ - {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, + {file = "libcommon-0.6.7-py3-none-any.whl", hash = "sha256:f6768dad6f6554ed4cc60351ff924d08f2186e119918d3a0f088de9a7d87bf48"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 
1619056c..588f2d0b 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.7-py3-none-any.whl", develop = false } diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py index f43511ea..a1f2db22 100644 --- a/workers/datasets_based/src/datasets_based/worker.py +++ b/workers/datasets_based/src/datasets_based/worker.py @@ -333 +333 @@ class Worker(ABC): - Queue(type=processing_step.job_type).add_job( + Queue(type=processing_step.job_type).upsert_job( @@ -356 +356 @@ class Worker(ABC): - Queue(type=processing_step.job_type).add_job( + Queue(type=processing_step.job_type).upsert_job( diff --git a/workers/datasets_based/tests/test_worker_loop.py b/workers/datasets_based/tests/test_worker_loop.py index 8acbf23d..8e969c02 100644 --- a/workers/datasets_based/tests/test_worker_loop.py +++ b/workers/datasets_based/tests/test_worker_loop.py @@ -60 +60 @@ def test_process_next_job( - worker_loop.queue.add_job(dataset=dataset, config=config, split=split) + worker_loop.queue.upsert_job(dataset=dataset, config=config, split=split)
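upsert_job keeps at most one waiting job per set of parameters: waiting duplicates are cancelled, then a fresh job is added through the now-private _add_job. A minimal usage sketch, adapted from the tests in the diff (it assumes a MongoDB connection has already been configured, as in the library's test fixtures):

    from libcommon.queue import Queue, Status

    queue = Queue("test_type")
    queue.upsert_job(dataset="test_dataset", force=True)  # creates a waiting job
    queue.upsert_job(dataset="test_dataset")              # cancels it and adds a replacement
    job_info = queue.start_job()                          # starts the surviving waiting job
    assert job_info["force"] is False                     # the replacement won, not the forced original
    queue.finish_job(job_id=job_info["job_id"], finished_status=Status.SUCCESS)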
fd39deb1eb1252eecff0ed49dee927703f42db0a
Sylvain Lesage
2023-01-23T10:17:19
feat: 🎸 add support for pdf2image (#691)
diff --git a/.github/workflows/_quality-python.yml b/.github/workflows/_quality-python.yml index c67052a7..a015f530 100644 --- a/.github/workflows/_quality-python.yml +++ b/.github/workflows/_quality-python.yml @@ -39 +39 @@ jobs: - run: sudo apt update; sudo apt install -y libicu-dev ffmpeg libavcodec-extra libsndfile1 llvm pkg-config + run: sudo apt update; sudo apt install -y libicu-dev ffmpeg libavcodec-extra libsndfile1 llvm pkg-config poppler-utils diff --git a/.github/workflows/_unit-tests-python.yml b/.github/workflows/_unit-tests-python.yml index d7155b0f..6109f079 100644 --- a/.github/workflows/_unit-tests-python.yml +++ b/.github/workflows/_unit-tests-python.yml @@ -40 +40 @@ jobs: - run: sudo apt update; sudo apt install -y libicu-dev ffmpeg libavcodec-extra libsndfile1 llvm pkg-config + run: sudo apt update; sudo apt install -y libicu-dev ffmpeg libavcodec-extra libsndfile1 llvm pkg-config poppler-utils diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 52eceede..237246b9 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-60a0b7b" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-47d3297" diff --git a/workers/datasets_based/Dockerfile b/workers/datasets_based/Dockerfile index 83c77053..39ee8c95 100644 --- a/workers/datasets_based/Dockerfile +++ b/workers/datasets_based/Dockerfile @@ -19,0 +20 @@ RUN apt-get update \ + poppler-utils \ diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index fff9c50e..b756355b 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -848,0 +849 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" + @@ -1274,0 +1276,11 @@ python-versions = ">=2.6" +[[package]] +name = "pdf2image" +version = "1.16.2" +description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pillow = "*" + @@ -1602,0 +1615,18 @@ diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pypdf2" +version = "3.0.1" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +crypto = ["PyCryptodome"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "wheel"] +docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] +full = ["Pillow", "PyCryptodome"] +image = ["Pillow"] + @@ -2152,0 +2183 @@ url = "https://download.pytorch.org/whl/cpu/torch-1.13.1%2Bcpu-cp39-cp39-linux_x + @@ -2166,0 +2198 @@ url = "https://download.pytorch.org/whl/cpu/torchaudio-0.13.1%2Bcpu-cp39-cp39-li + @@ -2450 +2482 @@ python-versions = "3.9.15" -content-hash = "0fa47399ab7f3f7a1bd3676b6b171e166336318c18270d468b112c5701595f43" +content-hash = "7be4e8e664880da11c0b018fc1902c413ecafbf410f5513c6bae66e85e631164" @@ -3884,0 +3917,4 @@ pbr = [ +pdf2image = [ + {file = "pdf2image-1.16.2-py3-none-any.whl", hash = "sha256:1469335050a17657f94c2f1ef3a23e57807d631ad5bcbaec997c2c42a8186f4a"}, + {file = "pdf2image-1.16.2.tar.gz", hash = "sha256:86761091eee35f4641ea98dfddb254254361d018be698a199aff7c1d37331803"}, +] @@ -4329,0 +4366,4 @@ pyparsing = [ +pypdf2 = [ + {file = "PyPDF2-3.0.1.tar.gz", hash = "sha256:a74408f69ba6271f71b9352ef4ed03dc53a31aa404d29b5d31f53bfecfee1440"}, + {file = "pypdf2-3.0.1-py3-none-any.whl", hash = "sha256:d16e4205cfee272fbdc0568b68d82be796540b1537508cef59388f839c191928"}, +] diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index b3b5672a..1619056c 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -25,0 +26 @@ openpyxl = "^3.0.9" +pdf2image = "^1.16.2" @@ -27,0 +29 @@ pydub = "^0.25.1" +pypdf2 = "^3.0.1"
d7d1dd70c40f8b0d68ec16cf534be42df6eb1f71
Sylvain Lesage
2023-01-20T20:10:51
feat: 🎸 block more datasets, and allow more /first-rows per ns (#690)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 96922c51..73365ec5 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -199 +199 @@ firstRows: - maxJobsPerNamespace: 4 + maxJobsPerNamespace: 10 @@ -214 +214 @@ parquetAndDatasetInfo: - blockedDatasets: "matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french,Graphcore/vqa,echarlaix/vqa,jimregan/clarinpl_studio,GEM/xsum,Tevatron/wikipedia-squad-corpus,mulcyber/europarl-mono,nateraw/wit,bigscience/P3,tau/mrqa,uva-irlab/trec-cast-2019-multi-turn,vblagoje/wikipedia_snippets_streamed,Tevatron/wikipedia-wq-corpus,malteos/paperswithcode-aspects,Samip/Scotch,iluvvatar/RuREBus,nateraw/quickdraw,tau/scrolls,qanastek/MASSIVE,TalTechNLP/VoxLingua107,shanya/crd3,HugoLaurencon/libri_light,jerpint/imagenette,Leyo/TGIF,DFKI-SLT/few-nerd,crystina-z/msmarco-passage-dl20,HuggingFaceM4/epic_kitchens_100,HuggingFaceM4/yttemporal180m,andreagasparini/librispeech_train_other_only,allenai/nllb,biglam/nls_chapbook_illustrations,winvoker/lvis,Lacito/pangloss,i
ndonesian-nlp/librivox-indonesia,Graphcore/gqa-lxmert,nanom/splittedspanish3bwc,cahya/librivox-indonesia,asapp/slue,sil-ai/audio-keyword-spotting,tner/wikiann,rogerdehe/xfund,arpelarpe/nota,mwhanna/ACT-Thor,sanchit-gandhi/librispeech_asr_clean,echarlaix/gqa-lxmert,shunk031/cocostuff,gigant/m-ailabs_speech_dataset_fr,jimregan/clarinpl_sejmsenat,1aurent/icdar-2011,marinone94/nst_no,jamescalam/unsplash-25k-images,stas/openwebtext-10k,florianbussmann/train_tickets-yu2020pick,benschill/brain-tumor-collection,imvladikon/paranames,PolyAI/evi,bengaliAI/cvbn,Sreyan88/librispeech_asr,superb,mozilla-foundation/common_voice_10_0,darkproger/librispeech_asr,kresnik/librispeech_asr_test,Lehrig/Monkey-Species-Collection,HuggingFaceM4/TGIF,crystina-z/miracl-bm25-negative,cats_vs_dogs,biglam/gallica_literary_fictions,common_language,competition_math,cornell_movie_dialog,evidence_infer_treatment,hebrew_projectbenyehuda,lj_speech,mc4,muchocine,opus_euconst,tab_fact,the_pile,tapaco,turkic_xwmt,web_nlg,vctk,mathaillah/BeritaHoaks-NonHoaks,universal_morphologies,LanceaKing/asvspoof2019,andreagasparini/librispeech_train_clean_only,nuprl/MultiPL-E,SLPL/naab-raw,mteb/results,SocialGrep/the-reddit-climate-change-dataset,bigscience-biomedical/anat_em,crystina-z/xor-tydi-corpus,qanastek/QUAERO,TomTBT/pmc_open_access_section,jamescalam/movielens-25m-ratings,HuggingFaceM4/charades,Tevatron/xor-tydi-corpus,khalidalt/tydiqa-primary,nvm472001/cvdataset-layoutlmv3,Lehrig/GTZAN-Collection,mteb/tatoeba-bitext-mining,sled-umich/Action-Effect,HamdiJr/Egyptian_hieroglyphs,joelito/lextreme,cooleel/xfund_de,oscar,mozilla-foundation/common_voice_7_0,KETI-AIR/vqa,Livingwithmachines/MapReader_Data_SIGSPATIAL_2022,NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English,miracl/miracl,Muennighoff/flores200,Murple/mmcrsc,mesolitica/dbp,CodedotAI/code_clippy,keshan/clean-si-mc4,yhavinga/ccmatrix,metashift,google/fleurs,HugoLaurencon/libri_light_bytes,biwi_kinect_head_pose,ami,bigscience-biomedical/ebm_pico,HuggingFaceM4/general-pmd-synthetic-testing,crystina-z/mmarco,robertmyers/pile_v2,bigbio/anat_em,biglam/early_printed_books_font_detection,nateraw/imagenet-sketch,jpwahle/dblp-discovery-dataset,andreagasparini/librispeech_test_only,crystina-z/mmarco-corpus,mozilla-foundation/common_voice_6_0,biglam/brill_iconclass,bigscience-biomedical/evidence_inference,HuggingFaceM4/cm4-synthetic-testing,SocialGrep/ten-million-reddit-answers,bnl_newspapers,multilingual_librispeech,openslr,GEM/BiSECT,Graphcore/gqa,SaulLu/Natural_Questions_HTML_reduced_all,ccdv/cnn_dailymail,mozilla-foundation/common_voice_1_0,huggan/anime-faces,Biomedical-TeMU/ProfNER_corpus_NER,MorVentura/TRBLLmaker,student/celebA,Rodion/uno_sustainable_development_goals,Nart/parallel-ab-ru,HuggingFaceM4/VQAv2,mesolitica/noisy-ms-en-augmentation,nateraw/rice-image-dataset,tensorcat/wikipedia-japanese,angelolab/ark_example,RAYZ/Mixed-Dia,ywchoi/mdpi_sept10,TomTBT/pmc_open_access_figure,society-ethics/lila_camera_traps,autoevaluator/shoes-vs-sandals-vs-boots,cjvt/slo_collocations,parambharat/mile_dataset,rossevine/tesis,ksaml/Stanford_dogs,nuprl/MultiPL-E-raw-data,ZihaoLin/zhlds,ACL-OCL/acl-anthology-corpus,mozilla-foundation/common_voice_2_0,Biomedical-TeMU/SPACCC_Sentence-Splitter,nateraw/rice-image-dataset-2,mesolitica/noisy-en-ms-augmentation,bigbio/ctebmsp,bigbio/distemist,nlphuji/vasr,parambharat/malayalam_asr_corpus,cjvt/sloleks,DavidVivancos/MindBigData2022_Imagenet_IN_Spct,KokeCacao/oracle,keremberke/nfl-object-detection,lafi23333/ds,Lykon/OnePiece,kaliansh/sdaia,sil-ai/audio-
kw-in-context,andite/riyo-tag,ilhanemirhan/eee543,backslashlim/LoRA-Datasets,hr16/Miwano-Rag,ccdv/mediasum" + blockedDatasets: "matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french,Graphcore/vqa,echarlaix/vqa,jimregan/clarinpl_studio,GEM/xsum,Tevatron/wikipedia-squad-corpus,mulcyber/europarl-mono,nateraw/wit,bigscience/P3,tau/mrqa,uva-irlab/trec-cast-2019-multi-turn,vblagoje/wikipedia_snippets_streamed,Tevatron/wikipedia-wq-corpus,malteos/paperswithcode-aspects,Samip/Scotch,iluvvatar/RuREBus,nateraw/quickdraw,tau/scrolls,qanastek/MASSIVE,TalTechNLP/VoxLingua107,shanya/crd3,HugoLaurencon/libri_light,jerpint/imagenette,Leyo/TGIF,DFKI-SLT/few-nerd,crystina-z/msmarco-passage-dl20,HuggingFaceM4/epic_kitchens_100,HuggingFaceM4/yttemporal180m,andreagasparini/librispeech_train_other_only,allenai/nllb,biglam/nls_chapbook_illustrations,winvoker/lvis,Lacito/pangloss,indonesian-nlp/librivox-indonesia,Graphcore/gqa-lxmert,nanom/splittedspanish3bwc,cahya/librivox-indonesia,asapp/slue,sil-ai/audio-keyword-spotting,tn
er/wikiann,rogerdehe/xfund,arpelarpe/nota,mwhanna/ACT-Thor,sanchit-gandhi/librispeech_asr_clean,echarlaix/gqa-lxmert,shunk031/cocostuff,gigant/m-ailabs_speech_dataset_fr,jimregan/clarinpl_sejmsenat,1aurent/icdar-2011,marinone94/nst_no,jamescalam/unsplash-25k-images,stas/openwebtext-10k,florianbussmann/train_tickets-yu2020pick,benschill/brain-tumor-collection,imvladikon/paranames,PolyAI/evi,bengaliAI/cvbn,Sreyan88/librispeech_asr,superb,mozilla-foundation/common_voice_10_0,darkproger/librispeech_asr,kresnik/librispeech_asr_test,Lehrig/Monkey-Species-Collection,HuggingFaceM4/TGIF,crystina-z/miracl-bm25-negative,cats_vs_dogs,biglam/gallica_literary_fictions,common_language,competition_math,cornell_movie_dialog,evidence_infer_treatment,hebrew_projectbenyehuda,lj_speech,mc4,muchocine,opus_euconst,tab_fact,the_pile,tapaco,turkic_xwmt,web_nlg,vctk,mathaillah/BeritaHoaks-NonHoaks,universal_morphologies,LanceaKing/asvspoof2019,andreagasparini/librispeech_train_clean_only,nuprl/MultiPL-E,SLPL/naab-raw,mteb/results,SocialGrep/the-reddit-climate-change-dataset,bigscience-biomedical/anat_em,crystina-z/xor-tydi-corpus,qanastek/QUAERO,TomTBT/pmc_open_access_section,jamescalam/movielens-25m-ratings,HuggingFaceM4/charades,Tevatron/xor-tydi-corpus,khalidalt/tydiqa-primary,nvm472001/cvdataset-layoutlmv3,Lehrig/GTZAN-Collection,mteb/tatoeba-bitext-mining,sled-umich/Action-Effect,HamdiJr/Egyptian_hieroglyphs,joelito/lextreme,cooleel/xfund_de,oscar,mozilla-foundation/common_voice_7_0,KETI-AIR/vqa,Livingwithmachines/MapReader_Data_SIGSPATIAL_2022,NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English,miracl/miracl,Muennighoff/flores200,Murple/mmcrsc,mesolitica/dbp,CodedotAI/code_clippy,keshan/clean-si-mc4,yhavinga/ccmatrix,metashift,google/fleurs,HugoLaurencon/libri_light_bytes,biwi_kinect_head_pose,ami,bigscience-biomedical/ebm_pico,HuggingFaceM4/general-pmd-synthetic-testing,crystina-z/mmarco,robertmyers/pile_v2,bigbio/anat_em,biglam/early_printed_books_font_detection,nateraw/imagenet-sketch,jpwahle/dblp-discovery-dataset,andreagasparini/librispeech_test_only,crystina-z/mmarco-corpus,mozilla-foundation/common_voice_6_0,biglam/brill_iconclass,bigscience-biomedical/evidence_inference,HuggingFaceM4/cm4-synthetic-testing,SocialGrep/ten-million-reddit-answers,bnl_newspapers,multilingual_librispeech,openslr,GEM/BiSECT,Graphcore/gqa,SaulLu/Natural_Questions_HTML_reduced_all,ccdv/cnn_dailymail,mozilla-foundation/common_voice_1_0,huggan/anime-faces,Biomedical-TeMU/ProfNER_corpus_NER,MorVentura/TRBLLmaker,student/celebA,Rodion/uno_sustainable_development_goals,Nart/parallel-ab-ru,HuggingFaceM4/VQAv2,mesolitica/noisy-ms-en-augmentation,nateraw/rice-image-dataset,tensorcat/wikipedia-japanese,angelolab/ark_example,RAYZ/Mixed-Dia,ywchoi/mdpi_sept10,TomTBT/pmc_open_access_figure,society-ethics/lila_camera_traps,autoevaluator/shoes-vs-sandals-vs-boots,cjvt/slo_collocations,parambharat/mile_dataset,rossevine/tesis,ksaml/Stanford_dogs,nuprl/MultiPL-E-raw-data,ZihaoLin/zhlds,ACL-OCL/acl-anthology-corpus,mozilla-foundation/common_voice_2_0,Biomedical-TeMU/SPACCC_Sentence-Splitter,nateraw/rice-image-dataset-2,mesolitica/noisy-en-ms-augmentation,bigbio/ctebmsp,bigbio/distemist,nlphuji/vasr,parambharat/malayalam_asr_corpus,cjvt/sloleks,DavidVivancos/MindBigData2022_Imagenet_IN_Spct,KokeCacao/oracle,keremberke/nfl-object-detection,lafi23333/ds,Lykon/OnePiece,kaliansh/sdaia,sil-ai/audio-kw-in-context,andite/riyo-tag,ilhanemirhan/eee543,backslashlim/LoRA-Datasets,hr16/Miwano-Rag,ccdv/mediasum,mozilla-foundation/common_voice_3_0,mozil
la-foundation/common_voice_4_0,bigbio/ebm_pico,parambharat/kannada_asr_corpus,parambharat/telugu_asr_corpus,Abuelnour/json_1000_Scientific_Paper,reazon-research/reazonspeech,shunk031/livedoor-news-corpus,mesolitica/translated-SQUAD,SamAct/medium_cleaned,EfaceD/ElysiumInspirations,cahya/fleurs,guangguang/azukijpg,genjib/LAVISHData,rohitp1/librispeech_asr_clean,azraahmadi/autotrain-data-xraydatasetp2,HuggingFaceM4/COCO,bio-datasets/e3c,nateraw/auto-cats-and-dogs,keremberke/smoke-object-detection,ds4sd/DocLayNet,nlphuji/utk_faces,corentinm7/MyoQuant-SDH-Data,xglue,grasshoff/lhc_sents,HugoLaurencon/IIIT-5K,alkzar90/CC6204-Hackaton-Cub-Dataset,RaphaelOlivier/whisper_adversarial_examples,bruno-cotrim/arch-max,keshan/multispeaker-tts-sinhala,Tevatron/beir-corpus,fcakyon/gun-object-detection,ccdv/arxiv-summarization"
47578935bd988c98a97e14ec1138c691cc61695b
Sylvain Lesage
2023-01-20T13:19:09
chore: 🤖 update resources (#686)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 09c85109..96922c51 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -179 +179 @@ splits: - maxJobsPerNamespace: 4 + maxJobsPerNamespace: 5 @@ -183 +183 @@ splits: - replicas: 40 + replicas: 12 @@ -203 +203 @@ firstRows: - replicas: 64 + replicas: 80 @@ -214 +214 @@ parquetAndDatasetInfo: - blockedDatasets: "matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french,Graphcore/vqa,echarlaix/vqa,jimregan/clarinpl_studio,GEM/xsum,Tevatron/wikipedia-squad-corpus,mulcyber/europarl-mono,nateraw/wit,bigscience/P3,tau/mrqa,uva-irlab/trec-cast-2019-multi-turn,vblagoje/wikipedia_snippets_streamed,Tevatron/wikipedia-wq-corpus,malteos/paperswithcode-aspects,Samip/Scotch,iluvvatar/RuREBus,nateraw/quickdraw,tau/scrolls,qanastek/MASSIVE,TalTechNLP/VoxLingua107,shanya/crd3,HugoLaurencon/libri_light,jerpint/imagenette,Leyo/TGIF,DFKI-SLT/few-nerd,crystina-z/msmarco-passage-dl20,HuggingFaceM4/epic_kitchens_100,HuggingFaceM4/yttemporal180m,andreagasparini/
librispeech_train_other_only,allenai/nllb,biglam/nls_chapbook_illustrations,winvoker/lvis,Lacito/pangloss,indonesian-nlp/librivox-indonesia,Graphcore/gqa-lxmert,nanom/splittedspanish3bwc,cahya/librivox-indonesia,asapp/slue,sil-ai/audio-keyword-spotting,tner/wikiann,rogerdehe/xfund,arpelarpe/nota,mwhanna/ACT-Thor,sanchit-gandhi/librispeech_asr_clean,echarlaix/gqa-lxmert,shunk031/cocostuff,gigant/m-ailabs_speech_dataset_fr,jimregan/clarinpl_sejmsenat,1aurent/icdar-2011,marinone94/nst_no,jamescalam/unsplash-25k-images,stas/openwebtext-10k,florianbussmann/train_tickets-yu2020pick,benschill/brain-tumor-collection,imvladikon/paranames,PolyAI/evi,bengaliAI/cvbn,Sreyan88/librispeech_asr,superb,mozilla-foundation/common_voice_10_0,darkproger/librispeech_asr,kresnik/librispeech_asr_test,Lehrig/Monkey-Species-Collection,HuggingFaceM4/TGIF,crystina-z/miracl-bm25-negative,cats_vs_dogs,biglam/gallica_literary_fictions,common_language,competition_math,cornell_movie_dialog,evidence_infer_treatment,hebrew_projectbenyehuda,lj_speech,mc4,muchocine,opus_euconst,tab_fact,the_pile,tapaco,turkic_xwmt,web_nlg,vctk,mathaillah/BeritaHoaks-NonHoaks,universal_morphologies,LanceaKing/asvspoof2019,andreagasparini/librispeech_train_clean_only,nuprl/MultiPL-E,SLPL/naab-raw,mteb/results,SocialGrep/the-reddit-climate-change-dataset,bigscience-biomedical/anat_em,crystina-z/xor-tydi-corpus,qanastek/QUAERO,TomTBT/pmc_open_access_section,jamescalam/movielens-25m-ratings,HuggingFaceM4/charades,Tevatron/xor-tydi-corpus,khalidalt/tydiqa-primary,nvm472001/cvdataset-layoutlmv3,Lehrig/GTZAN-Collection,mteb/tatoeba-bitext-mining,sled-umich/Action-Effect,HamdiJr/Egyptian_hieroglyphs,joelito/lextreme,cooleel/xfund_de,oscar,mozilla-foundation/common_voice_7_0,KETI-AIR/vqa,Livingwithmachines/MapReader_Data_SIGSPATIAL_2022,NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English,miracl/miracl,Muennighoff/flores200,Murple/mmcrsc,mesolitica/dbp,CodedotAI/code_clippy,keshan/clean-si-mc4,yhavinga/ccmatrix,metashift,google/fleurs,HugoLaurencon/libri_light_bytes,biwi_kinect_head_pose,ami,bigscience-biomedical/ebm_pico,HuggingFaceM4/general-pmd-synthetic-testing,crystina-z/mmarco,robertmyers/pile_v2,bigbio/anat_em,biglam/early_printed_books_font_detection,nateraw/imagenet-sketch,jpwahle/dblp-discovery-dataset,andreagasparini/librispeech_test_only,crystina-z/mmarco-corpus,mozilla-foundation/common_voice_6_0,biglam/brill_iconclass,bigscience-biomedical/evidence_inference,HuggingFaceM4/cm4-synthetic-testing,SocialGrep/ten-million-reddit-answers" + blockedDatasets: 
"matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french,Graphcore/vqa,echarlaix/vqa,jimregan/clarinpl_studio,GEM/xsum,Tevatron/wikipedia-squad-corpus,mulcyber/europarl-mono,nateraw/wit,bigscience/P3,tau/mrqa,uva-irlab/trec-cast-2019-multi-turn,vblagoje/wikipedia_snippets_streamed,Tevatron/wikipedia-wq-corpus,malteos/paperswithcode-aspects,Samip/Scotch,iluvvatar/RuREBus,nateraw/quickdraw,tau/scrolls,qanastek/MASSIVE,TalTechNLP/VoxLingua107,shanya/crd3,HugoLaurencon/libri_light,jerpint/imagenette,Leyo/TGIF,DFKI-SLT/few-nerd,crystina-z/msmarco-passage-dl20,HuggingFaceM4/epic_kitchens_100,HuggingFaceM4/yttemporal180m,andreagasparini/librispeech_train_other_only,allenai/nllb,biglam/nls_chapbook_illustrations,winvoker/lvis,Lacito/pangloss,indonesian-nlp/librivox-indonesia,Graphcore/gqa-lxmert,nanom/splittedspanish3bwc,cahya/librivox-indonesia,asapp/slue,sil-ai/audio-keyword-spotting,tner/wikiann,rogerdehe/xfund,arpelarpe/nota,mwhanna/ACT-Thor,sanchit-gandhi/librispeech_asr_clean,echarlaix/gqa-lxmert,shunk031/c
ocostuff,gigant/m-ailabs_speech_dataset_fr,jimregan/clarinpl_sejmsenat,1aurent/icdar-2011,marinone94/nst_no,jamescalam/unsplash-25k-images,stas/openwebtext-10k,florianbussmann/train_tickets-yu2020pick,benschill/brain-tumor-collection,imvladikon/paranames,PolyAI/evi,bengaliAI/cvbn,Sreyan88/librispeech_asr,superb,mozilla-foundation/common_voice_10_0,darkproger/librispeech_asr,kresnik/librispeech_asr_test,Lehrig/Monkey-Species-Collection,HuggingFaceM4/TGIF,crystina-z/miracl-bm25-negative,cats_vs_dogs,biglam/gallica_literary_fictions,common_language,competition_math,cornell_movie_dialog,evidence_infer_treatment,hebrew_projectbenyehuda,lj_speech,mc4,muchocine,opus_euconst,tab_fact,the_pile,tapaco,turkic_xwmt,web_nlg,vctk,mathaillah/BeritaHoaks-NonHoaks,universal_morphologies,LanceaKing/asvspoof2019,andreagasparini/librispeech_train_clean_only,nuprl/MultiPL-E,SLPL/naab-raw,mteb/results,SocialGrep/the-reddit-climate-change-dataset,bigscience-biomedical/anat_em,crystina-z/xor-tydi-corpus,qanastek/QUAERO,TomTBT/pmc_open_access_section,jamescalam/movielens-25m-ratings,HuggingFaceM4/charades,Tevatron/xor-tydi-corpus,khalidalt/tydiqa-primary,nvm472001/cvdataset-layoutlmv3,Lehrig/GTZAN-Collection,mteb/tatoeba-bitext-mining,sled-umich/Action-Effect,HamdiJr/Egyptian_hieroglyphs,joelito/lextreme,cooleel/xfund_de,oscar,mozilla-foundation/common_voice_7_0,KETI-AIR/vqa,Livingwithmachines/MapReader_Data_SIGSPATIAL_2022,NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English,miracl/miracl,Muennighoff/flores200,Murple/mmcrsc,mesolitica/dbp,CodedotAI/code_clippy,keshan/clean-si-mc4,yhavinga/ccmatrix,metashift,google/fleurs,HugoLaurencon/libri_light_bytes,biwi_kinect_head_pose,ami,bigscience-biomedical/ebm_pico,HuggingFaceM4/general-pmd-synthetic-testing,crystina-z/mmarco,robertmyers/pile_v2,bigbio/anat_em,biglam/early_printed_books_font_detection,nateraw/imagenet-sketch,jpwahle/dblp-discovery-dataset,andreagasparini/librispeech_test_only,crystina-z/mmarco-corpus,mozilla-foundation/common_voice_6_0,biglam/brill_iconclass,bigscience-biomedical/evidence_inference,HuggingFaceM4/cm4-synthetic-testing,SocialGrep/ten-million-reddit-answers,bnl_newspapers,multilingual_librispeech,openslr,GEM/BiSECT,Graphcore/gqa,SaulLu/Natural_Questions_HTML_reduced_all,ccdv/cnn_dailymail,mozilla-foundation/common_voice_1_0,huggan/anime-faces,Biomedical-TeMU/ProfNER_corpus_NER,MorVentura/TRBLLmaker,student/celebA,Rodion/uno_sustainable_development_goals,Nart/parallel-ab-ru,HuggingFaceM4/VQAv2,mesolitica/noisy-ms-en-augmentation,nateraw/rice-image-dataset,tensorcat/wikipedia-japanese,angelolab/ark_example,RAYZ/Mixed-Dia,ywchoi/mdpi_sept10,TomTBT/pmc_open_access_figure,society-ethics/lila_camera_traps,autoevaluator/shoes-vs-sandals-vs-boots,cjvt/slo_collocations,parambharat/mile_dataset,rossevine/tesis,ksaml/Stanford_dogs,nuprl/MultiPL-E-raw-data,ZihaoLin/zhlds,ACL-OCL/acl-anthology-corpus,mozilla-foundation/common_voice_2_0,Biomedical-TeMU/SPACCC_Sentence-Splitter,nateraw/rice-image-dataset-2,mesolitica/noisy-en-ms-augmentation,bigbio/ctebmsp,bigbio/distemist,nlphuji/vasr,parambharat/malayalam_asr_corpus,cjvt/sloleks,DavidVivancos/MindBigData2022_Imagenet_IN_Spct,KokeCacao/oracle,keremberke/nfl-object-detection,lafi23333/ds,Lykon/OnePiece,kaliansh/sdaia,sil-ai/audio-kw-in-context,andite/riyo-tag,ilhanemirhan/eee543,backslashlim/LoRA-Datasets,hr16/Miwano-Rag,ccdv/mediasum" @@ -225 +225 @@ parquetAndDatasetInfo: - replicas: 40 + replicas: 55
31067d3c643719c4a3331261d4a34ee00cdccd65
Sylvain Lesage
2023-01-19T16:23:50
fix: 🐛 fix memory specification + increase pods in /parquet (#684)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 0d417389..09c85109 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -242 +242 @@ parquet: - replicas: 2 + replicas: 4 @@ -246 +246 @@ parquet: - memory: "0.1Gi" + memory: "100Mi" @@ -259 +259 @@ datasetInfo: - replicas: 2 + replicas: 4 @@ -263 +263 @@ datasetInfo: - memory: "0.1Gi" + memory: "100Mi" @@ -276 +276 @@ sizes: - replicas: 2 + replicas: 4 @@ -280 +280 @@ sizes: - memory: "0.1Gi" + memory: "100Mi"
cfb12c5c60461463ff62b597280d229460570ef6
Sylvain Lesage
2023-01-19T15:34:19
feat: 🎸 increase resources (#683)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 3e26187c..0d417389 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -183 +183 @@ splits: - replicas: 20 + replicas: 40 @@ -203 +203 @@ firstRows: - replicas: 32 + replicas: 64
009658336029005a761df7163e6c0019b89a71a6
Sylvain Lesage
2023-01-19T14:35:47
feat: 🎸 increase resources (#682)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index b23ba762..3e26187c 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -183 +183 @@ splits: - replicas: 12 + replicas: 20 @@ -203 +203 @@ firstRows: - replicas: 16 + replicas: 32
bbcbfa0ff48d3f81a0fe81529c2577767060fdea
Sylvain Lesage
2023-01-19T14:18:20
feat: 🎸 increase number of workers for a moment (#681)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 5029f41c..b23ba762 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -225 +225 @@ parquetAndDatasetInfo: - replicas: 16 + replicas: 40
4d5f20bd98be48c51fd8cf5bc3dc9c05c80f7bb6
Sylvain Lesage
2023-01-19T13:24:14
chore: 🤖 add --no-cache (poetry) and --no-cache-dir (pip) (#680)
diff --git a/.github/workflows/_quality-python.yml b/.github/workflows/_quality-python.yml index 71c15904..c67052a7 100644 --- a/.github/workflows/_quality-python.yml +++ b/.github/workflows/_quality-python.yml @@ -56 +56 @@ jobs: - run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.2 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^hffs @/d')" + run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.2 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^torch @/d' | sed '/^torchaudio @/d' | sed '/^libcommon @/d' | sed '/^trec-car-tools @/d' | sed '/^hffs @/d')" diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index bcecf7fb..52eceede 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-ca635d8" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-61dda8c" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-ca635d8", - "api": "huggingface/datasets-server-services-api:sha-ca635d8" + "admin": "huggingface/datasets-server-services-admin:sha-61dda8c", + "api": "huggingface/datasets-server-services-api:sha-61dda8c" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-ca635d8" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-60a0b7b" diff --git a/jobs/mongodb_migration/Dockerfile b/jobs/mongodb_migration/Dockerfile index 5947de24..ea7c97a6 100644 --- a/jobs/mongodb_migration/Dockerfile +++ b/jobs/mongodb_migration/Dockerfile @@ -21 +21 @@ RUN pip install -U --no-cache-dir pip -RUN pip install "poetry==$POETRY_VERSION" +RUN pip install --no-cache-dir "poetry==$POETRY_VERSION" @@ -28 +28 @@ WORKDIR /src/jobs/mongodb_migration/ -RUN poetry install +RUN poetry install --no-cache @@ -30 +30 @@ COPY jobs/mongodb_migration/src ./src -RUN poetry install +RUN poetry install --no-cache diff --git a/services/admin/Dockerfile b/services/admin/Dockerfile index 5adbb598..344b510b 100644 --- a/services/admin/Dockerfile +++ b/services/admin/Dockerfile @@ -21 +21 @@ RUN pip install -U --no-cache-dir pip -RUN pip install "poetry==$POETRY_VERSION" +RUN pip install --no-cache-dir "poetry==$POETRY_VERSION" @@ -28 +28 @@ WORKDIR /src/services/admin/ -RUN poetry install +RUN poetry install --no-cache @@ -30 +30 @@ COPY services/admin/src ./src -RUN poetry install +RUN poetry install --no-cache diff --git a/services/api/Dockerfile b/services/api/Dockerfile index 5deb45d3..4201e26e 100644 --- a/services/api/Dockerfile +++ b/services/api/Dockerfile @@ -21 +21 @@ RUN pip install -U --no-cache-dir pip -RUN pip install "poetry==$POETRY_VERSION" +RUN pip install --no-cache-dir "poetry==$POETRY_VERSION" @@ -28 +28 @@ WORKDIR /src/services/api/ -RUN poetry install +RUN poetry install --no-cache @@ -30 +30 @@ COPY services/api/src ./src -RUN poetry install +RUN poetry install --no-cache diff --git a/tools/Python.mk b/tools/Python.mk index 4ae26589..d03e0205 100644 --- a/tools/Python.mk +++ b/tools/Python.mk @@ -35 +35 @@ pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.2 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^hffs @/d')" + bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed 
'/^pymongo==/,+109 d' | sed '/^requests==2.28.2 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^torch @/d' | sed '/^torchaudio @/d' | sed '/^libcommon @/d' | sed '/^trec-car-tools @/d' | sed '/^hffs @/d')" diff --git a/workers/datasets_based/Dockerfile b/workers/datasets_based/Dockerfile index 75e3edab..83c77053 100644 --- a/workers/datasets_based/Dockerfile +++ b/workers/datasets_based/Dockerfile @@ -23 +23 @@ RUN pip install -U --no-cache-dir pip -RUN pip install "poetry==$POETRY_VERSION" +RUN pip install --no-cache-dir "poetry==$POETRY_VERSION" @@ -31 +31 @@ WORKDIR /src/workers/datasets_based/ -RUN poetry install +RUN poetry install --no-cache @@ -33 +33 @@ COPY workers/datasets_based/src ./src -RUN poetry install +RUN poetry install --no-cache diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index 4a5c03f1..fff9c50e 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -291 +291 @@ name = "cloudpickle" -version = "2.2.0" +version = "2.2.1" @@ -849 +848,0 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" - @@ -981 +980 @@ name = "markupsafe" -version = "2.1.1" +version = "2.1.2" @@ -1166,48 +1164,0 @@ python-versions = ">=3.8" -[[package]] -name = "nvidia-cublas-cu11" -version = "11.10.3.66" -description = "CUBLAS native runtime libraries" -category = "main" -optional = false -python-versions = ">=3" - -[package.dependencies] -setuptools = "*" -wheel = "*" - -[[package]] -name = "nvidia-cuda-nvrtc-cu11" -version = "11.7.99" -description = "NVRTC native runtime libraries" -category = "main" -optional = false -python-versions = ">=3" - -[package.dependencies] -setuptools = "*" -wheel = "*" - -[[package]] -name = "nvidia-cuda-runtime-cu11" -version = "11.7.99" -description = "CUDA Runtime native Libraries" -category = "main" -optional = false -python-versions = ">=3" - -[package.dependencies] -setuptools = "*" -wheel = "*" - -[[package]] -name = "nvidia-cudnn-cu11" -version = "8.5.0.96" -description = "cuDNN runtime libraries" -category = "main" -optional = false -python-versions = ">=3" - -[package.dependencies] -setuptools = "*" -wheel = "*" - @@ -1283 +1234 @@ name = "pandas" -version = "1.5.2" +version = "1.5.3" @@ -2024 +1975 @@ python-versions = "*" -name = "tensorflow" +name = "tensorflow-cpu" @@ -2187 +2138 @@ name = "torch" -version = "1.13.1" +version = "1.13.1+cpu" @@ -2194,4 +2144,0 @@ python-versions = ">=3.7.0" -nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\""} -nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""} -nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""} -nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\""} @@ -2202,0 +2150,3 @@ opt-einsum = ["opt-einsum (>=3.3)"] +[package.source] +type = "url" +url = "https://download.pytorch.org/whl/cpu/torch-1.13.1%2Bcpu-cp39-cp39-linux_x86_64.whl" @@ -2205 +2155 @@ name = "torchaudio" -version = "0.13.1" +version = "0.13.1+cpu" @@ -2212 +2162 @@ python-versions = "*" -torch = "*" +torch = "1.13.1" @@ -2213,0 +2164,3 @@ torch = "*" +[package.source] +type = "url" +url = "https://download.pytorch.org/whl/cpu/torchaudio-0.13.1%2Bcpu-cp39-cp39-linux_x86_64.whl" @@ -2337 +2290 @@ name = "types-psutil" -version = "5.9.5.5" +version = "5.9.5.6" @@ -2353 +2306 @@ name = "types-requests" -version = "2.28.11.7" +version = "2.28.11.8" @@ -2497 +2450 @@ python-versions = "3.9.15" -content-hash = 
"c1d20085c45680ac61d073768cd0111aaf366065d3db2c18ab94b2dbb9bf3065" +content-hash = "0fa47399ab7f3f7a1bd3676b6b171e166336318c18270d468b112c5701595f43" @@ -2874,2 +2827,2 @@ cloudpickle = [ - {file = "cloudpickle-2.2.0-py3-none-any.whl", hash = "sha256:7428798d5926d8fcbfd092d18d01a2a03daf8237d8fcdc8095d256b8490796f0"}, - {file = "cloudpickle-2.2.0.tar.gz", hash = "sha256:3f4219469c55453cfe4737e564b67c2a149109dabf7f242478948b895f61106f"}, + {file = "cloudpickle-2.2.1-py3-none-any.whl", hash = "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f"}, + {file = "cloudpickle-2.2.1.tar.gz", hash = "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5"}, @@ -3529,40 +3482,50 @@ markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = 
"sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = 
"MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, @@ -3815,17 +3777,0 @@ numpy = [ -nvidia-cublas-cu11 = [ - {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = "sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"}, - {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = "sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"}, -] -nvidia-cuda-nvrtc-cu11 = [ - {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"}, - {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"}, - {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"}, -] -nvidia-cuda-runtime-cu11 = [ - {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"}, - {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"}, -] -nvidia-cudnn-cu11 = [ - {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"}, - {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, -] @@ -3899,27 +3845,27 @@ pandas = [ - {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9dbacd22555c2d47f262ef96bb4e30880e5956169741400af8b306bbb24a273"}, - {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e2b83abd292194f350bb04e188f9379d36b8dfac24dd445d5c87575f3beaf789"}, - {file = "pandas-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2552bffc808641c6eb471e55aa6899fa002ac94e4eebfa9ec058649122db5824"}, - {file = "pandas-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc87eac0541a7d24648a001d553406f4256e744d92df1df8ebe41829a915028"}, - {file = "pandas-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d8fd58df5d17ddb8c72a5075d87cd80d71b542571b5f78178fb067fa4e9c72"}, - {file = "pandas-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:4aed257c7484d01c9a194d9a94758b37d3d751849c05a0050c087a358c41ad1f"}, - {file = "pandas-1.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:375262829c8c700c3e7cbb336810b94367b9c4889818bbd910d0ecb4e45dc261"}, - {file = "pandas-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc3cd122bea268998b79adebbb8343b735a5511ec14efb70a39e7acbc11ccbdc"}, - {file = "pandas-1.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b4f5a82afa4f1ff482ab8ded2ae8a453a2cdfde2001567b3ca24a4c5c5ca0db3"}, - {file = "pandas-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8092a368d3eb7116e270525329a3e5c15ae796ccdf7ccb17839a73b4f5084a39"}, - {file = "pandas-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6257b314fc14958f8122779e5a1557517b0f8e500cfb2bd53fa1f75a8ad0af2"}, - {file = "pandas-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:82ae615826da838a8e5d4d630eb70c993ab8636f0eff13cb28aafc4291b632b5"}, - {file = "pandas-1.5.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:457d8c3d42314ff47cc2d6c54f8fc0d23954b47977b2caed09cd9635cb75388b"}, - {file = "pandas-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c009a92e81ce836212ce7aa98b219db7961a8b95999b97af566b8dc8c33e9519"}, - {file = "pandas-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:71f510b0efe1629bf2f7c0eadb1ff0b9cf611e87b73cd017e6b7d6adb40e2b3a"}, - {file = "pandas-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a40dd1e9f22e01e66ed534d6a965eb99546b41d4d52dbdb66565608fde48203f"}, - {file = "pandas-1.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae7e989f12628f41e804847a8cc2943d362440132919a69429d4dea1f164da0"}, - {file = "pandas-1.5.2-cp38-cp38-win32.whl", hash = "sha256:530948945e7b6c95e6fa7aa4be2be25764af53fba93fe76d912e35d1c9ee46f5"}, - {file = "pandas-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:73f219fdc1777cf3c45fde7f0708732ec6950dfc598afc50588d0d285fddaefc"}, - {file = "pandas-1.5.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9608000a5a45f663be6af5c70c3cbe634fa19243e720eb380c0d378666bc7702"}, - {file = "pandas-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:315e19a3e5c2ab47a67467fc0362cb36c7c60a93b6457f675d7d9615edad2ebe"}, - {file = "pandas-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e18bc3764cbb5e118be139b3b611bc3fbc5d3be42a7e827d1096f46087b395eb"}, - {file = "pandas-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0183cb04a057cc38fde5244909fca9826d5d57c4a5b7390c0cc3fa7acd9fa883"}, - {file = "pandas-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344021ed3e639e017b452aa8f5f6bf38a8806f5852e217a7594417fb9bbfa00e"}, - {file = "pandas-1.5.2-cp39-cp39-win32.whl", hash = "sha256:e7469271497960b6a781eaa930cba8af400dd59b62ec9ca2f4d31a19f2f91090"}, - {file = "pandas-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:c218796d59d5abd8780170c937b812c9637e84c32f8271bbf9845970f8c1351f"}, - {file = "pandas-1.5.2.tar.gz", hash = "sha256:220b98d15cee0b2cd839a6358bd1f273d0356bf964c1a1aeb32d47db0215488b"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, + {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, @@ -4849,17 +4795,13 @@ tensorboard-plugin-wit = [ -tensorflow = [ - {file = "tensorflow-2.11.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6c049fec6c2040685d6f43a63e17ccc5d6b0abc16b70cc6f5e7d691262b5d2d0"}, - {file = "tensorflow-2.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcc8380820cea8f68f6c90b8aee5432e8537e5bb9ec79ac61a98e6a9a02c7d40"}, - {file = 
"tensorflow-2.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d973458241c8771bf95d4ba68ad5d67b094f72dd181c2d562ffab538c1b0dad7"}, - {file = "tensorflow-2.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:d470b772ee3c291a8c7be2331e7c379e0c338223c0bf532f5906d4556f17580d"}, - {file = "tensorflow-2.11.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:d29c1179149fa469ad68234c52c83081d037ead243f90e826074e2563a0f938a"}, - {file = "tensorflow-2.11.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cdba2fce00d6c924470d4fb65d5e95a4b6571a863860608c0c13f0393f4ca0d"}, - {file = "tensorflow-2.11.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2ab20f93d2b52a44b414ec6dcf82aa12110e90e0920039a27108de28ae2728"}, - {file = "tensorflow-2.11.0-cp37-cp37m-win_amd64.whl", hash = "sha256:445510f092f7827e1f60f59b8bfb58e664aaf05d07daaa21c5735a7f76ca2b25"}, - {file = "tensorflow-2.11.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:056d29f2212342536ce3856aa47910a2515eb97ec0a6cc29ed47fc4be1369ec8"}, - {file = "tensorflow-2.11.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17b29d6d360fad545ab1127db52592efd3f19ac55c1a45e5014da328ae867ab4"}, - {file = "tensorflow-2.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:335ab5cccd7a1c46e3d89d9d46913f0715e8032df8d7438f9743b3fb97b39f69"}, - {file = "tensorflow-2.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:d48da37c8ae711eb38047a56a052ca8bb4ee018a91a479e42b7a8d117628c32e"}, - {file = "tensorflow-2.11.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:d9cf25bca641f2e5c77caa3bfd8dd6b892a7aec0695c54d2a7c9f52a54a8d487"}, - {file = "tensorflow-2.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d28f9691ebc48c0075e271023b3f147ae2bc29a3d3a7f42d45019c6b4a700d2"}, - {file = "tensorflow-2.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:276a44210d956701899dc78ad0aa116a0071f22fb0bcc1ea6bb59f7646b08d11"}, - {file = "tensorflow-2.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:cc3444fe1d58c65a195a69656bf56015bf19dc2916da607d784b0a1e215ec008"}, +tensorflow-cpu = [ + {file = "tensorflow_cpu-2.11.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:91bac68200ddbdff757c9d3aec8a03ad12b5fef21b937ff287721076e43b58b4"}, + {file = "tensorflow_cpu-2.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b318429219392b2e73f72099db5b92cfd516171c1e10e4ef37b0f53166f627da"}, + {file = "tensorflow_cpu-2.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c9bbd54abc00858bd4722ddaa6ba6469f9730d626786b7bd19a544defb61f11"}, + {file = "tensorflow_cpu-2.11.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:6bb3f3a8b6a96025fdffde2526ca2c58bb36410a74163a498ca9b2d68d3ccfcf"}, + {file = "tensorflow_cpu-2.11.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdcc9f733285bb1c917cde6731edcbf2ecc5ca4bd8c6a4c168a7f478e4056654"}, + {file = "tensorflow_cpu-2.11.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57aee7f2f3eed2f6e26bc3695c967fa889c98cefb4b8bfb2f47e171d96c13a0a"}, + {file = "tensorflow_cpu-2.11.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:c302c1b9728b4ce32eca8041e1375d51896832d84c84ce8eeb2577b73ffb0392"}, + {file = "tensorflow_cpu-2.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a125157fdb2b1191ca6321e78127f032ce06ae17349e9affd75595782cca4cf"}, + {file = "tensorflow_cpu-2.11.0-cp38-cp38-win_amd64.whl", hash 
= "sha256:08cc63ea4728ac0246063cef4f79911367c194515a45cc247ac05eb6684cd4aa"}, + {file = "tensorflow_cpu-2.11.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:d47df7bf4e684639d3d83cc27d150c6d29b8bd5f0586ca0a9a040af6840a92b0"}, + {file = "tensorflow_cpu-2.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1954bccbd78681c3df0d4ac9f020a0ee44b17bd6b5962ebb8848479879f45bc7"}, + {file = "tensorflow_cpu-2.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:d5e3c0666abdc0d9c63790238a1b91a41f2e622b488df7276750f61351b12ccc"}, @@ -4964,44 +4906,2 @@ tomlkit = [ -torch = [ - {file = "torch-1.13.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:fd12043868a34a8da7d490bf6db66991108b00ffbeecb034228bfcbbd4197143"}, - {file = "torch-1.13.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d9fe785d375f2e26a5d5eba5de91f89e6a3be5d11efb497e76705fdf93fa3c2e"}, - {file = "torch-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:98124598cdff4c287dbf50f53fb455f0c1e3a88022b39648102957f3445e9b76"}, - {file = "torch-1.13.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:393a6273c832e047581063fb74335ff50b4c566217019cc6ace318cd79eb0566"}, - {file = "torch-1.13.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:0122806b111b949d21fa1a5f9764d1fd2fcc4a47cb7f8ff914204fd4fc752ed5"}, - {file = "torch-1.13.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:22128502fd8f5b25ac1cd849ecb64a418382ae81dd4ce2b5cebaa09ab15b0d9b"}, - {file = "torch-1.13.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:76024be052b659ac1304ab8475ab03ea0a12124c3e7626282c9c86798ac7bc11"}, - {file = "torch-1.13.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ea8dda84d796094eb8709df0fcd6b56dc20b58fdd6bc4e8d7109930dafc8e419"}, - {file = "torch-1.13.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2ee7b81e9c457252bddd7d3da66fb1f619a5d12c24d7074de91c4ddafb832c93"}, - {file = "torch-1.13.1-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:0d9b8061048cfb78e675b9d2ea8503bfe30db43d583599ae8626b1263a0c1380"}, - {file = "torch-1.13.1-cp37-none-macosx_11_0_arm64.whl", hash = "sha256:f402ca80b66e9fbd661ed4287d7553f7f3899d9ab54bf5c67faada1555abde28"}, - {file = "torch-1.13.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:727dbf00e2cf858052364c0e2a496684b9cb5aa01dc8a8bc8bbb7c54502bdcdd"}, - {file = "torch-1.13.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:df8434b0695e9ceb8cc70650afc1310d8ba949e6db2a0525ddd9c3b2b181e5fe"}, - {file = "torch-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:5e1e722a41f52a3f26f0c4fcec227e02c6c42f7c094f32e49d4beef7d1e213ea"}, - {file = "torch-1.13.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:33e67eea526e0bbb9151263e65417a9ef2d8fa53cbe628e87310060c9dcfa312"}, - {file = "torch-1.13.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:eeeb204d30fd40af6a2d80879b46a7efbe3cf43cdbeb8838dd4f3d126cc90b2b"}, - {file = "torch-1.13.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:50ff5e76d70074f6653d191fe4f6a42fdbe0cf942fbe2a3af0b75eaa414ac038"}, - {file = "torch-1.13.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2c3581a3fd81eb1f0f22997cddffea569fea53bafa372b2c0471db373b26aafc"}, - {file = "torch-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:0aa46f0ac95050c604bcf9ef71da9f1172e5037fdf2ebe051962d47b123848e7"}, - {file = "torch-1.13.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6930791efa8757cb6974af73d4996b6b50c592882a324b8fb0589c6a9ba2ddaf"}, - {file = "torch-1.13.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:e0df902a7c7dd6c795698532ee5970ce898672625635d885eade9976e5a04949"}, 
-] -torchaudio = [ - {file = "torchaudio-0.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e0f3dc6699506521364266704e6bf89d0d0579fd435d12c5c2f5858d52de4fa"}, - {file = "torchaudio-0.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ec72a17d4d2178829e7780682999b535cf57fe160d0c20b0d6bdc1ad1a87c4dd"}, - {file = "torchaudio-0.13.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:62e9b5c260a27231d905588b72d2e2984ff9cdbb557af86eb178982fd265198d"}, - {file = "torchaudio-0.13.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:2e47562cdcdd47cb8ed86a3cf053b7067cc9e88340f4550ae73d790ddbc12f21"}, - {file = "torchaudio-0.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5f2fc60206aa687eadc8cfb7c167784678936fbad13ccc583794fba3d6f77e1b"}, - {file = "torchaudio-0.13.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fa7cc1a2b3056fc6ceee6d60dbcdef58955a7ca534667d0db9b4fc9efa087a1"}, - {file = "torchaudio-0.13.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:901a6d65750fc3fb2c656ae274cc61599aa7d5472361fbc206e381a310d619d1"}, - {file = "torchaudio-0.13.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b798447283551def4d911dd6acb2c4bc08214e95f677f56c4f623fc99a90eff"}, - {file = "torchaudio-0.13.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6bb94deeaf05fab9ff2f34906d3c36b46032420c3a08253d8c452999c235119c"}, - {file = "torchaudio-0.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42ce5c66d304bc2cd68338916b8223e322e09a84dcbd9228814ef36bc477a37b"}, - {file = "torchaudio-0.13.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b093b3e7661c85168ec9dde2cf97345965ea0931d3d2a7e78bd409221e6d6998"}, - {file = "torchaudio-0.13.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:167f77ef385592a5af6f4e2ad1630a42ca1b70f905762fcd62e13dd4f163bdcf"}, - {file = "torchaudio-0.13.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3c48bcff00eae8180f87f58d1c9e7e9fd8c4cb7eb3ea8817935fb6048d152bc7"}, - {file = "torchaudio-0.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:5de44b6b96a8d7a05650ef7377b2386650ddce92551d7dc02e05e7002aee5fd2"}, - {file = "torchaudio-0.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9d2170540de32ae031aab3936129868e896ea041617b6d6692dde6aa2dfb0a23"}, - {file = "torchaudio-0.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:91fcfbf47000402d12bff2624e6220a0fd3b8ca8ee6ff51edf5945ec39ab0a7f"}, - {file = "torchaudio-0.13.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:32592088b48dfcd2ca247ad5d081a9e0c61de0caabb993d68bac779326456d8d"}, - {file = "torchaudio-0.13.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3023aeb5c191047bef1681a3741bffd4a2164b58a64cad24dd37da5e1ac2d1f1"}, - {file = "torchaudio-0.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:418fbf93ba77b9742b05b76561be4fe7e8ded27cfe414828624765986b30ce5a"}, -] +torch = [] +torchaudio = [] @@ -5054,2 +4954,2 @@ types-psutil = [ - {file = "types-psutil-5.9.5.5.tar.gz", hash = "sha256:4f26fdb2cb064b274cbc6359fba4abf3b3a2993d7d4abc336ad0947568212c62"}, - {file = "types_psutil-5.9.5.5-py3-none-any.whl", hash = "sha256:e576bb81c74f7443b067e94f92435894d5dd561161bec3d6401727b63df009f0"}, + {file = "types-psutil-5.9.5.6.tar.gz", hash = "sha256:65f93589711ca48859602c955c4247c834d96d4d33a9cbe4142d89593ef33b3c"}, + {file = "types_psutil-5.9.5.6-py3-none-any.whl", hash = "sha256:07acd57594ff254285250ef70be0fe4efe0b11a30065b6dce62a856235a8ca10"}, @@ -5062,2 +4962,2 @@ types-requests = [ - {file = "types-requests-2.28.11.7.tar.gz", hash = "sha256:0ae38633734990d019b80f5463dfa164ebd3581998ac8435f526da6fe4d598c3"}, - 
{file = "types_requests-2.28.11.7-py3-none-any.whl", hash = "sha256:b6a2fca8109f4fdba33052f11ed86102bddb2338519e1827387137fefc66a98b"}, + {file = "types-requests-2.28.11.8.tar.gz", hash = "sha256:e67424525f84adfbeab7268a159d3c633862dafae15c5b19547ce1b55954f0a3"}, + {file = "types_requests-2.28.11.8-py3-none-any.whl", hash = "sha256:61960554baca0008ae7e2db2bd3b322ca9a144d3e80ce270f5fb640817e40994"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index be537172..b3b5672a 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -31 +31 @@ scikit-learn = "^1.0" -tensorflow = {version = "^2.9.1", markers = "sys_platform != 'darwin' or platform_machine != 'arm64'"} +tensorflow-cpu = {version = "^2.9.1", markers = "sys_platform != 'darwin' or platform_machine != 'arm64'"} @@ -34 +34,2 @@ tfrecord = "^1.14.1" -torchaudio = "^0.13.1" +torch = { url = "https://download.pytorch.org/whl/cpu/torch-1.13.1%2Bcpu-cp39-cp39-linux_x86_64.whl" } +torchaudio = { url = "https://download.pytorch.org/whl/cpu/torchaudio-0.13.1%2Bcpu-cp39-cp39-linux_x86_64.whl" }
37f53b1a2b57a23772cbeeee26877d98ab36e509
Sylvain Lesage
2023-01-19T10:34:08
feat: 🎸 add /sizes (#679)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 118f1807..bcecf7fb 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-3ff0a9c" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-ca635d8" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-3ff0a9c", - "api": "huggingface/datasets-server-services-api:sha-3ff0a9c" + "admin": "huggingface/datasets-server-services-admin:sha-ca635d8", + "api": "huggingface/datasets-server-services-api:sha-ca635d8" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-2c8e163" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-ca635d8" diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index c483168b..c71fdca5 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -105,0 +106,2 @@ parquetAndDatasetInfo: + # the maximum size of the supported datasets. Bigger datasets, or datasets that cannot provide the size, are ignored. + maxDatasetSize: "500_000_000" # support up to 500 MB @@ -127,0 +130,8 @@ datasetInfo: + +sizes: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index a3b9ae5b..5029f41c 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -267,0 +268,17 @@ datasetInfo: + +sizes: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 2 + nodeSelector: + role-datasets-server: "true" + replicas: 2 + resources: + requests: + cpu: 1 + memory: "0.1Gi" + limits: + cpu: 2 + memory: "1Gi" + tolerations: [] diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index b9701050..c7778403 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -97,0 +98,5 @@ app: "{{ include "release" . }}-worker-dataset-info" +{{- define "labels.sizes" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-worker-sizes" +{{- end -}} + diff --git a/chart/templates/worker/sizes/_container.tpl b/chart/templates/worker/sizes/_container.tpl new file mode 100644 index 00000000..4f22f6af --- /dev/null +++ b/chart/templates/worker/sizes/_container.tpl @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerWorkerSizes" -}} +- name: "{{ include "name" . }}-worker-sizes" + image: {{ .Values.dockerImage.workers.datasets_based }} + imagePullPolicy: {{ .Values.docker.pullPolicy }} + env: + - name: DATASETS_BASED_ENDPOINT + value: "/sizes" + # ^ hard-coded + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} + {{ include "envWorkerLoop" . | nindent 2 }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} + # overridden + value: {{ .Values.sizes.queue.maxJobsPerNamespace | quote }} + securityContext: + allowPrivilegeEscalation: false + resources: {{ toYaml .Values.sizes.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/worker/sizes/deployment.yaml b/chart/templates/worker/sizes/deployment.yaml new file mode 100644 index 00000000..67452bba --- /dev/null +++ b/chart/templates/worker/sizes/deployment.yaml @@ -0,0 +1,26 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: {{ include "labels.sizes" . | nindent 4 }} + name: "{{ include "release" . }}-worker-sizes" + namespace: {{ .Release.Namespace }} +spec: + progressDeadlineSeconds: 600 + replicas: {{ .Values.sizes.replicas }} + revisionHistoryLimit: 10 + selector: + matchLabels: {{ include "labels.sizes" . | nindent 6 }} + strategy: + type: Recreate + template: + metadata: + labels: {{ include "labels.sizes" . | nindent 8 }} + spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + containers: {{ include "containerWorkerSizes" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.sizes.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.sizes.tolerations | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/values.yaml b/chart/values.yaml index b36dac06..313687f7 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -305,0 +306,14 @@ datasetInfo: + +sizes: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 + nodeSelector: {} + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 + tolerations: [] diff --git a/e2e/tests/test_11_auth.py b/e2e/tests/test_11_auth.py index 4df95333..b8684ef9 100644 --- a/e2e/tests/test_11_auth.py +++ b/e2e/tests/test_11_auth.py @@ -46,0 +47 @@ def test_auth_e2e( + f"/sizes?dataset={dataset}", diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index 5377ab4d..708b5d7e 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -318 +318 @@ name = "libcommon" -version = "0.6.4" +version = "0.6.6" @@ -336 +336 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" @@ -861 +861 @@ python-versions = "3.9.15" -content-hash = "0bcebdd4352e192c5057e78bee0717aacfd5b091cb6cd6ad40edf2424df647e4" +content-hash = "63b2a3d951b41381ea9d1592aa7dbb8ade73969bb73647aa5998222a8d29d79e" @@ -1106 +1106 @@ libcommon = [ - {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, + {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index 49dacf39..962e5974 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } diff --git a/libs/libcommon/dist/libcommon-0.6.5-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.5-py3-none-any.whl new file mode 100644 index 00000000..876ea6f7 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.5-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.5.tar.gz b/libs/libcommon/dist/libcommon-0.6.5.tar.gz new file mode 100644 index 00000000..0b1a3ea8 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.5.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl new file mode 100644 index 00000000..3098e18b Binary files /dev/null and 
b/libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.6.tar.gz b/libs/libcommon/dist/libcommon-0.6.6.tar.gz new file mode 100644 index 00000000..9b5ff70a Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.6.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 54523410..dc8c4404 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.6.4" +version = "0.6.6" diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py index 084f1d12..8c87617e 100644 --- a/libs/libcommon/src/libcommon/config.py +++ b/libs/libcommon/src/libcommon/config.py @@ -150,0 +151 @@ class ProcessingGraphConfig: + "/sizes": {"input_type": "dataset", "requires": "/parquet-and-dataset-info"}, diff --git a/libs/libcommon/tests/test_processing_steps.py b/libs/libcommon/tests/test_processing_steps.py index 6123f99a..d101f080 100644 --- a/libs/libcommon/tests/test_processing_steps.py +++ b/libs/libcommon/tests/test_processing_steps.py @@ -15,0 +16 @@ def test_default_graph(): + sizes = graph.get_step("/sizes") @@ -21,0 +23 @@ def test_default_graph(): + assert sizes is not None @@ -27,0 +30 @@ def test_default_graph(): + assert sizes.parent is parquet_and_dataset_info @@ -31 +34 @@ def test_default_graph(): - assert parquet_and_dataset_info.children == [parquet, dataset_info] + assert parquet_and_dataset_info.children == [parquet, dataset_info, sizes] @@ -33,0 +37 @@ def test_default_graph(): + assert sizes.children == [] @@ -39,0 +44 @@ def test_default_graph(): + assert sizes.get_ancestors() == [parquet_and_dataset_info] diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 5081f356..dab64664 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -354 +354 @@ name = "libcommon" -version = "0.6.4" +version = "0.6.6" @@ -372 +372 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" @@ -995 +995 @@ python-versions = "3.9.15" -content-hash = "d4bd5baac88d3873fbf416c00ccdc8f224f4941ea0e81e176af32981b9128913" +content-hash = "93227842402cf05aad83e31f6aa4a55804a8073f25a214ac34d13c818e84ef0b" @@ -1252 +1252 @@ libcommon = [ - {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, + {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 496ad15c..41911fc1 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 4d2956c6..51a415e9 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -370 +370 @@ name = "libcommon" -version = "0.6.4" +version = "0.6.6" @@ -388 +388 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" @@ -1029 +1029 @@ python-versions = "3.9.15" -content-hash = 
"e0c8cefca08354a6e349d28076b13d5185cc9333350e3df742db512293063e8d" +content-hash = "d87f7385b9af864a5f8846bab395f89f25d74976984bdeafac2e83911524cc66" @@ -1290 +1290 @@ libcommon = [ - {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, + {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index d793a29e..d3fe8d38 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index 2e8b5065..e64091ca 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -165,0 +166,13 @@ services: + worker-sizes: + # build: + # context: .. + # dockerfile: workers/dataset_based/Dockerfile + image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided} + extends: + file: docker-compose-base.yml + service: datasets-worker + environment: + DATASETS_BASED_ENDPOINT: "/sizes" # hard-coded + depends_on: + - mongodb + restart: always diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index 07d27b95..4a5c03f1 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -876 +876 @@ name = "libcommon" -version = "0.6.4" +version = "0.6.6" @@ -894 +894 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl" @@ -2497 +2497 @@ python-versions = "3.9.15" -content-hash = "d83bcdcbaefeb20ef577109e64bb0f7a0975628350c85357b57a494fb26eceff" +content-hash = "c1d20085c45680ac61d073768cd0111aaf366065d3db2c18ab94b2dbb9bf3065" @@ -3401 +3401 @@ libcommon = [ - {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, + {file = "libcommon-0.6.6-py3-none-any.whl", hash = "sha256:eaba34848bb56448370b8719774e92ab597ad4d33f9d35d4a60cf6adcededd49"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 5f198bac..be537172 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.6-py3-none-any.whl", develop = false } diff --git a/workers/datasets_based/src/datasets_based/worker_factory.py b/workers/datasets_based/src/datasets_based/worker_factory.py index 4924b6a3..c27e220a 100644 --- a/workers/datasets_based/src/datasets_based/worker_factory.py +++ b/workers/datasets_based/src/datasets_based/worker_factory.py @@ -13,0 +14 @@ from datasets_based.workers.parquet_and_dataset_info import ParquetAndDatasetInf +from datasets_based.workers.sizes import SizesWorker @@ -38,0 +40,2 @@ class DatasetBasedWorkerFactory(WorkerFactory): + elif job_type == SizesWorker.get_job_type(): + return SizesWorker(job_info=job_info, app_config=self.app_config) @@ -45,0 +49 @@ class DatasetBasedWorkerFactory(WorkerFactory): + 
SizesWorker.get_job_type(), diff --git a/workers/datasets_based/src/datasets_based/workers/dataset_info.py b/workers/datasets_based/src/datasets_based/workers/dataset_info.py index 9a13fcc1..e1b24210 100644 --- a/workers/datasets_based/src/datasets_based/workers/dataset_info.py +++ b/workers/datasets_based/src/datasets_based/workers/dataset_info.py @@ -61 +61 @@ def compute_dataset_info_response(dataset: str) -> DatasetInfoResponse: - `DatasetInfoResponse`: An object with the parquet_response (list of parquet files). + `DatasetInfoResponse`: An object with the dataset_info response. @@ -95 +95 @@ class DatasetInfoWorker(Worker): - return "2.0.0" + return "1.0.0" diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index df846dfc..26475bde 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -98 +98 @@ class ParquetWorker(Worker): - return "2.0.0" + return "3.0.0" diff --git a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py index 7205ec2f..7a0a554f 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py @@ -657 +657 @@ class ParquetAndDatasetInfoWorker(DatasetsBasedWorker): - return "2.0.0" + return "1.0.0" diff --git a/workers/datasets_based/src/datasets_based/workers/sizes.py b/workers/datasets_based/src/datasets_based/workers/sizes.py new file mode 100644 index 00000000..cd848e6f --- /dev/null +++ b/workers/datasets_based/src/datasets_based/workers/sizes.py @@ -0,0 +1,200 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +from http import HTTPStatus +from typing import Any, Literal, Mapping, Optional, TypedDict + +from libcommon.dataset import DatasetNotFoundError +from libcommon.exceptions import CustomError +from libcommon.simple_cache import DoesNotExist, SplitFullName, get_response + +from datasets_based.config import AppConfig +from datasets_based.worker import JobInfo, Worker + +SizesWorkerErrorCode = Literal[ + "PreviousStepStatusError", + "PreviousStepFormatError", +] + + +class DatasetSize(TypedDict): + dataset: str + num_bytes_original_files: int + num_bytes_parquet_files: int + num_bytes_memory: int + num_rows: int + + +class ConfigSize(TypedDict): + dataset: str + config: str + num_bytes_original_files: int + num_bytes_parquet_files: int + num_bytes_memory: int + num_rows: int + num_columns: int + + +class SplitSize(TypedDict): + dataset: str + config: str + split: str + num_bytes_parquet_files: int + num_bytes_memory: int + num_rows: int + num_columns: int + + +class SizesContent(TypedDict): + dataset: DatasetSize + configs: list[ConfigSize] + splits: list[SplitSize] + + +class SizesResponse(TypedDict): + sizes: SizesContent + + +class SizesWorkerError(CustomError): + """Base class for exceptions in this module.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: SizesWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__(message, status_code, str(code), cause, disclose_cause) + + +class PreviousStepStatusError(SizesWorkerError): + """Raised when the previous step gave an error. 
The job should not have been created.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "PreviousStepStatusError", cause, False) + + +class PreviousStepFormatError(SizesWorkerError): + """Raised when the content of the previous step does not have the expected format.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "PreviousStepFormatError", cause, False) + + +def compute_sizes_response(dataset: str) -> SizesResponse: + """ + Get the response of /sizes for one specific dataset on huggingface.co. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + Returns: + `SizesResponse`: An object with the sizes_response. + <Tip> + Raises the following errors: + - [`~sizes.worker.PreviousStepStatusError`] + If the previous step gave an error. + - [`~sizes.worker.PreviousStepFormatError`] + If the content of the previous step does not have the expected format. + </Tip> + """ + logging.info(f"get sizes for dataset={dataset}") + + try: + response = get_response(kind="/parquet-and-dataset-info", dataset=dataset) + except DoesNotExist as e: + raise DatasetNotFoundError("No response found in previous step for this dataset.", e) from e + if response["http_status"] != HTTPStatus.OK: + raise PreviousStepStatusError( + f"Previous step gave an error: {response['http_status']}. This job should not have been created." + ) + content = response["content"] + try: + split_sizes: list[SplitSize] = [] + config_sizes: list[ConfigSize] = [] + for config in content["dataset_info"].keys(): + config_dataset_info = content["dataset_info"][config] + num_columns = len(config_dataset_info["features"]) + config_split_sizes: list[SplitSize] = [ + { + "dataset": dataset, + "config": config, + "split": split_info["name"], + "num_bytes_parquet_files": sum( + x["size"] + for x in content["parquet_files"] + if x["config"] == config and x["split"] == split_info["name"] + ), + "num_bytes_memory": split_info["num_bytes"], + "num_rows": split_info["num_examples"], + "num_columns": num_columns, + } + for split_info in config_dataset_info["splits"].values() + ] + config_sizes.append( + { + "dataset": dataset, + "config": config, + "num_bytes_original_files": config_dataset_info["download_size"], + "num_bytes_parquet_files": sum( + split_size["num_bytes_parquet_files"] for split_size in config_split_sizes + ), + "num_bytes_memory": sum( + split_size["num_bytes_memory"] for split_size in config_split_sizes + ), # or "num_bytes_memory": config_dataset_info["dataset_size"], + "num_rows": sum(split_size["num_rows"] for split_size in config_split_sizes), + "num_columns": len(config_dataset_info["features"]), + } + ) + split_sizes.extend(config_split_sizes) + dataset_size: DatasetSize = { + "dataset": dataset, + "num_bytes_original_files": sum(config_size["num_bytes_original_files"] for config_size in config_sizes), + "num_bytes_parquet_files": sum(config_size["num_bytes_parquet_files"] for config_size in config_sizes), + "num_bytes_memory": sum(config_size["num_bytes_memory"] for config_size in config_sizes), + "num_rows": sum(config_size["num_rows"] for config_size in config_sizes), + } + except Exception as e: + raise PreviousStepFormatError("Previous step did not return the expected content.", e) from e + + return { + "sizes": { + "dataset": dataset_size, + "configs": config_sizes, + "splits": split_sizes, + } + } + +
+class SizesWorker(Worker): + @staticmethod + def get_job_type() -> str: + return "/sizes" + + @staticmethod + def get_version() -> str: + return "1.0.0" + + def __init__(self, job_info: JobInfo, app_config: AppConfig) -> None: + job_type = job_info["type"] + try: + processing_step = app_config.processing_graph.graph.get_step_by_job_type(job_type) + except ValueError as e: + raise ValueError( + f"Unsupported job type: '{job_type}'. The job types declared in the processing graph are:" + f" {[step.job_type for step in app_config.processing_graph.graph.steps.values()]}" + ) from e + super().__init__(job_info=job_info, common_config=app_config.common, processing_step=processing_step) + + def compute(self) -> Mapping[str, Any]: + return compute_sizes_response(dataset=self.dataset) + + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + """Get the set of new splits from the content created by the compute step.""" + return { + SplitFullName(dataset=split_size["dataset"], config=split_size["config"], split=split_size["split"]) + for split_size in content["sizes"]["splits"] + } diff --git a/workers/datasets_based/tests/test_worker_factory.py b/workers/datasets_based/tests/test_worker_factory.py index 52b95bc8..ea035989 100644 --- a/workers/datasets_based/tests/test_worker_factory.py +++ b/workers/datasets_based/tests/test_worker_factory.py @@ -20,0 +21 @@ from datasets_based.worker_factory import DatasetBasedWorkerFactory + ("/sizes", "SizesWorker"), diff --git a/workers/datasets_based/tests/workers/test_sizes.py b/workers/datasets_based/tests/workers/test_sizes.py new file mode 100644 index 00000000..a132baae --- /dev/null +++ b/workers/datasets_based/tests/workers/test_sizes.py @@ -0,0 +1,240 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors.
+ +from http import HTTPStatus +from typing import Any + +import pytest +from libcommon.simple_cache import _clean_cache_database, upsert_response + +from datasets_based.config import AppConfig +from datasets_based.workers.sizes import ( + DatasetNotFoundError, + PreviousStepFormatError, + PreviousStepStatusError, + SizesWorker, +) + + [email protected](autouse=True) +def clean_mongo_database(app_config: AppConfig) -> None: + _clean_cache_database() + + +def get_worker(dataset: str, app_config: AppConfig, force: bool = False) -> SizesWorker: + return SizesWorker( + job_info={ + "type": SizesWorker.get_job_type(), + "dataset": dataset, + "config": None, + "split": None, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + ) + + [email protected]( + "dataset,upstream_status,upstream_content,expected_error_code,expected_content,should_raise", + [ + ( + "dataset_ok", + HTTPStatus.OK, + { + "parquet_files": [ + {"dataset": "dataset_ok", "config": "config_1", "split": "train", "size": 14281188}, + {"dataset": "dataset_ok", "config": "config_1", "split": "test", "size": 2383903}, + {"dataset": "dataset_ok", "config": "config_2", "split": "train", "size": 1234}, + {"dataset": "dataset_ok", "config": "config_2", "split": "train", "size": 6789}, + {"dataset": "dataset_ok", "config": "config_2", "split": "test", "size": 2383903}, + ], + "dataset_info": { + "config_1": { + "features": { + "image": {"_type": "Image"}, + "label": { + "names": ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"], + "_type": "ClassLabel", + }, + }, + "splits": { + "train": { + "name": "train", + "num_bytes": 17470800, + "num_examples": 60000, + "dataset_name": "dataset_ok", + }, + "test": { + "name": "test", + "num_bytes": 2916432, + "num_examples": 10000, + "dataset_name": "dataset_ok", + }, + }, + "download_checksums": { + "https://storage.googleapis.com/cvdf-datasets/mnist/train-images-idx3-ubyte.gz": { + "num_bytes": 9912422, + "checksum": "440fcabf73cc546fa21475e81ea370265605f56be210a4024d2ca8f203523609", + }, + "https://storage.googleapis.com/cvdf-datasets/mnist/train-labels-idx1-ubyte.gz": { + "num_bytes": 28881, + "checksum": "3552534a0a558bbed6aed32b30c495cca23d567ec52cac8be1a0730e8010255c", + }, + "https://storage.googleapis.com/cvdf-datasets/mnist/t10k-images-idx3-ubyte.gz": { + "num_bytes": 1648877, + "checksum": "8d422c7b0a1c1c79245a5bcf07fe86e33eeafee792b84584aec276f5a2dbc4e6", + }, + "https://storage.googleapis.com/cvdf-datasets/mnist/t10k-labels-idx1-ubyte.gz": { + "num_bytes": 4542, + "checksum": "f7ae60f92e00ec6debd23a6088c31dbd2371eca3ffa0defaefb259924204aec6", + }, + }, + "download_size": 11594722, + "dataset_size": 20387232, + "size_in_bytes": 31981954, + }, + "config_2": { + "features": { + "image": {"_type": "Image"}, + "image2": {"_type": "Image"}, + "label": { + "names": ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"], + "_type": "ClassLabel", + }, + }, + "splits": { + "train": { + "name": "train", + "num_bytes": 5678, + "num_examples": 3000, + "dataset_name": "dataset_ok", + }, + "test": { + "name": "test", + "num_bytes": 1234, + "num_examples": 1000, + "dataset_name": "dataset_ok", + }, + }, + "download_checksums": { + "https://storage.googleapis.com/cvdf-datasets/mnist/train-images-idx3-ubyte.gz": { + "num_bytes": 9912422, + "checksum": "440fcabf73cc546fa21475e81ea370265605f56be210a4024d2ca8f203523609", + }, + }, + "download_size": 9912422, + "dataset_size": 6912, + "size_in_bytes": 9919334, + }, + }, + }, + None, + { + "sizes": { + "dataset": { + "dataset": "dataset_ok", + 
"num_bytes_original_files": 21507144, + "num_bytes_parquet_files": 19057017, + "num_bytes_memory": 20394144, + "num_rows": 74000, + }, + "configs": [ + { + "dataset": "dataset_ok", + "config": "config_1", + "num_bytes_original_files": 11594722, + "num_bytes_parquet_files": 16665091, + "num_bytes_memory": 20387232, + "num_rows": 70000, + "num_columns": 2, + }, + { + "dataset": "dataset_ok", + "config": "config_2", + "num_bytes_original_files": 9912422, + "num_bytes_parquet_files": 2391926, + "num_bytes_memory": 6912, + "num_rows": 4000, + "num_columns": 3, + }, + ], + "splits": [ + { + "dataset": "dataset_ok", + "config": "config_1", + "split": "train", + "num_bytes_parquet_files": 14281188, + "num_bytes_memory": 17470800, + "num_rows": 60000, + "num_columns": 2, + }, + { + "dataset": "dataset_ok", + "config": "config_1", + "split": "test", + "num_bytes_parquet_files": 2383903, + "num_bytes_memory": 2916432, + "num_rows": 10000, + "num_columns": 2, + }, + { + "dataset": "dataset_ok", + "config": "config_2", + "split": "train", + "num_bytes_parquet_files": 8023, + "num_bytes_memory": 5678, + "num_rows": 3000, + "num_columns": 3, + }, + { + "dataset": "dataset_ok", + "config": "config_2", + "split": "test", + "num_bytes_parquet_files": 2383903, + "num_bytes_memory": 1234, + "num_rows": 1000, + "num_columns": 3, + }, + ], + } + }, + False, + ), + ("status_error", HTTPStatus.NOT_FOUND, {"error": "error"}, PreviousStepStatusError.__name__, None, True), + ( + "format_error", + HTTPStatus.OK, + {"not_dataset_info": "wrong_format"}, + PreviousStepFormatError.__name__, + None, + True, + ), + ], +) +def test_compute( + app_config: AppConfig, + dataset: str, + upstream_status: HTTPStatus, + upstream_content: Any, + expected_error_code: str, + expected_content: Any, + should_raise: bool, +) -> None: + upsert_response( + kind="/parquet-and-dataset-info", dataset=dataset, content=upstream_content, http_status=upstream_status + ) + worker = get_worker(dataset=dataset, app_config=app_config) + if should_raise: + with pytest.raises(Exception) as e: + worker.compute() + assert e.type.__name__ == expected_error_code + else: + assert worker.compute() == expected_content + + +def test_doesnotexist(app_config: AppConfig) -> None: + dataset = "doesnotexist" + worker = get_worker(dataset=dataset, app_config=app_config) + with pytest.raises(DatasetNotFoundError): + worker.compute()
b3ac6a16ad520b18997d1ee1304811f77707499e
Sylvain Lesage
2023-01-18T21:26:25
Create endpoint /dataset-info (#670)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index 441cb039..c22f7330 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -63 +63 @@ jobs: - PARQUET_COMMITTER_HF_TOKEN: "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" + PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN: "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" @@ -70,0 +71 @@ jobs: + COMMON_LOG_LEVEL: "DEBUG" @@ -83 +84 @@ jobs: - PARQUET_COMMITTER_HF_TOKEN: "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" + PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN: "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" @@ -90,0 +92 @@ jobs: + COMMON_LOG_LEVEL: "DEBUG" diff --git a/.github/workflows/_quality-python.yml b/.github/workflows/_quality-python.yml index dfd331f3..71c15904 100644 --- a/.github/workflows/_quality-python.yml +++ b/.github/workflows/_quality-python.yml @@ -55,3 +54,0 @@ jobs: - - name: Run pip-audit (datasets worker) - if: ${{ inputs.is-datasets-worker == true }} - run: bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" @@ -59,2 +56 @@ jobs: - if: ${{ inputs.is-datasets-worker == false && inputs.is-library-with-pymongo == false }} - run: bash -c "poetry run pip-audit --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" + run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.2 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^hffs @/d')" diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index fe5ac168..118f1807 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-6a36caa" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-3ff0a9c" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-6a36caa", - "api": "huggingface/datasets-server-services-api:sha-a36b651" + "admin": "huggingface/datasets-server-services-admin:sha-3ff0a9c", + "api": "huggingface/datasets-server-services-api:sha-3ff0a9c" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-6a36caa" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-2c8e163" diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index 0e0b0499..c483168b 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -17 +17 @@ secrets: - fromSecret: false + fromSecret: true @@ -104,0 +105,8 @@ firstRows: +parquetAndDatasetInfo: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 + @@ -111,0 +120,8 @@ parquet: + +datasetInfo: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index dce0f63c..a3b9ae5b 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -212 +212 @@ firstRows: -parquet: +parquetAndDatasetInfo: @@ -233,0 +234,34 @@ parquet: + +parquet: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 2 + nodeSelector: + role-datasets-server: "true" + replicas: 2 + resources: + requests: + cpu: 1 + memory: "0.1Gi" + limits: + cpu: 2 + memory: "1Gi" + tolerations: [] + +datasetInfo: + # 
override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 2 + nodeSelector: + role-datasets-server: "true" + replicas: 2 + resources: + requests: + cpu: 1 + memory: "0.1Gi" + limits: + cpu: 2 + memory: "1Gi" + tolerations: [] diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index 3d9add6e..b9701050 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -82,0 +83,5 @@ app: "{{ include "release" . }}-worker-first-rows" +{{- define "labels.parquetAndDatasetInfo" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-worker-parquet-and-dataset-info" +{{- end -}} + @@ -87,0 +93,5 @@ app: "{{ include "release" . }}-worker-parquet" +{{- define "labels.datasetInfo" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-worker-dataset-info" +{{- end -}} + diff --git a/chart/templates/worker/dataset-info/_container.tpl b/chart/templates/worker/dataset-info/_container.tpl new file mode 100644 index 00000000..2b45ea87 --- /dev/null +++ b/chart/templates/worker/dataset-info/_container.tpl @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerWorkerDatasetInfo" -}} +- name: "{{ include "name" . }}-worker-dataset-info" + image: {{ .Values.dockerImage.workers.datasets_based }} + imagePullPolicy: {{ .Values.docker.pullPolicy }} + env: + - name: DATASETS_BASED_ENDPOINT + value: "/dataset-info" + # ^ hard-coded + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} + {{ include "envWorkerLoop" . | nindent 2 }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} + # overridden + value: {{ .Values.datasetInfo.queue.maxJobsPerNamespace | quote }} + securityContext: + allowPrivilegeEscalation: false + resources: {{ toYaml .Values.datasetInfo.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/worker/dataset-info/deployment.yaml b/chart/templates/worker/dataset-info/deployment.yaml new file mode 100644 index 00000000..a802449f --- /dev/null +++ b/chart/templates/worker/dataset-info/deployment.yaml @@ -0,0 +1,26 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: {{ include "labels.datasetInfo" . | nindent 4 }} + name: "{{ include "release" . }}-worker-dataset-info" + namespace: {{ .Release.Namespace }} +spec: + progressDeadlineSeconds: 600 + replicas: {{ .Values.datasetInfo.replicas }} + revisionHistoryLimit: 10 + selector: + matchLabels: {{ include "labels.datasetInfo" . | nindent 6 }} + strategy: + type: Recreate + template: + metadata: + labels: {{ include "labels.datasetInfo" . | nindent 8 }} + spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + containers: {{ include "containerWorkerDatasetInfo" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.datasetInfo.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.datasetInfo.tolerations | nindent 8 }} + securityContext: {{ include "securityContext" . 
| nindent 8 }} diff --git a/chart/templates/worker/parquet-and-dataset-info/_container.tpl b/chart/templates/worker/parquet-and-dataset-info/_container.tpl new file mode 100644 index 00000000..f964f01a --- /dev/null +++ b/chart/templates/worker/parquet-and-dataset-info/_container.tpl @@ -0,0 +1,52 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerWorkerParquetAndDatasetInfo" -}} +- name: "{{ include "name" . }}-worker-parquet-and-dataset-info" + image: {{ .Values.dockerImage.workers.datasets_based }} + imagePullPolicy: {{ .Values.docker.pullPolicy }} + env: + - name: DATASETS_BASED_ENDPOINT + value: "/parquet-and-dataset-info" + # ^ hard-coded + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} + {{ include "envWorkerLoop" . | nindent 2 }} + {{ include "envDatasetsBased" . | nindent 2 }} + - name: DATASETS_BASED_HF_DATASETS_CACHE + value: {{ printf "%s/parquet-and-dataset-info/datasets" .Values.cacheDirectory | quote }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} + # overridden + value: {{ .Values.parquetAndDatasetInfo.queue.maxJobsPerNamespace | quote }} + - name: PARQUET_AND_DATASET_INFO_BLOCKED_DATASETS + value: {{ .Values.parquetAndDatasetInfo.blockedDatasets | quote }} + - name: PARQUET_AND_DATASET_INFO_COMMIT_MESSAGE + value: {{ .Values.parquetAndDatasetInfo.commitMessage | quote }} + - name: PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN + {{- if .Values.secrets.userHfToken.fromSecret }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.userHfToken.secretName | quote }} + key: HF_TOKEN + optional: false + {{- else }} + value: {{ .Values.secrets.userHfToken.value }} + {{- end }} + - name: PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE + value: {{ .Values.parquetAndDatasetInfo.maxDatasetSize | quote }} + - name: PARQUET_AND_DATASET_INFO_SOURCE_REVISION + value: {{ .Values.parquetAndDatasetInfo.sourceRevision | quote }} + - name: PARQUET_AND_DATASET_INFO_SUPPORTED_DATASETS + value: {{ .Values.parquetAndDatasetInfo.supportedDatasets | quote }} + - name: PARQUET_AND_DATASET_INFO_TARGET_REVISION + value: {{ .Values.parquetAndDatasetInfo.targetRevision | quote }} + - name: PARQUET_AND_DATASET_INFO_URL_TEMPLATE + value: {{ .Values.parquetAndDatasetInfo.urlTemplate | quote }} + volumeMounts: + {{ include "volumeMountCache" . | nindent 2 }} + securityContext: + allowPrivilegeEscalation: false + resources: {{ toYaml .Values.parquetAndDatasetInfo.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/worker/parquet-and-dataset-info/deployment.yaml b/chart/templates/worker/parquet-and-dataset-info/deployment.yaml new file mode 100644 index 00000000..564693fe --- /dev/null +++ b/chart/templates/worker/parquet-and-dataset-info/deployment.yaml @@ -0,0 +1,29 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: {{ include "labels.parquetAndDatasetInfo" . | nindent 4 }} + name: "{{ include "release" . }}-worker-parquet-and-dataset-info" + namespace: {{ .Release.Namespace }} +spec: + progressDeadlineSeconds: 600 + replicas: {{ .Values.parquetAndDatasetInfo.replicas }} + revisionHistoryLimit: 10 + selector: + matchLabels: {{ include "labels.parquetAndDatasetInfo" . | nindent 6 }} + strategy: + type: Recreate + template: + metadata: + labels: {{ include "labels.parquetAndDatasetInfo" . 
| nindent 8 }} + spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: + {{ include "initContainerCache" . | nindent 8 }} + containers: {{ include "containerWorkerParquetAndDatasetInfo" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.parquetAndDatasetInfo.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.parquetAndDatasetInfo.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/templates/worker/parquet/_container.tpl b/chart/templates/worker/parquet/_container.tpl index 1f2d7d1b..febf15ad 100644 --- a/chart/templates/worker/parquet/_container.tpl +++ b/chart/templates/worker/parquet/_container.tpl @@ -16,3 +15,0 @@ - {{ include "envDatasetsBased" . | nindent 2 }} - - name: DATASETS_BASED_HF_DATASETS_CACHE - value: {{ printf "%s/parquet/datasets" .Values.cacheDirectory | quote }} @@ -23,26 +19,0 @@ - - name: PARQUET_BLOCKED_DATASETS - value: {{ .Values.parquet.blockedDatasets | quote }} - - name: PARQUET_COMMIT_MESSAGE - value: {{ .Values.parquet.commitMessage | quote }} - - name: PARQUET_COMMITTER_HF_TOKEN - {{- if .Values.secrets.userHfToken.fromSecret }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.userHfToken.secretName | quote }} - key: HF_TOKEN - optional: false - {{- else }} - value: {{ .Values.secrets.userHfToken.value }} - {{- end }} - - name: PARQUET_MAX_DATASET_SIZE - value: {{ .Values.parquet.maxDatasetSize | quote }} - - name: PARQUET_SOURCE_REVISION - value: {{ .Values.parquet.sourceRevision | quote }} - - name: PARQUET_SUPPORTED_DATASETS - value: {{ .Values.parquet.supportedDatasets | quote }} - - name: PARQUET_TARGET_REVISION - value: {{ .Values.parquet.targetRevision | quote }} - - name: PARQUET_URL_TEMPLATE - value: {{ .Values.parquet.urlTemplate | quote }} - volumeMounts: - {{ include "volumeMountCache" . | nindent 2 }} diff --git a/chart/templates/worker/parquet/deployment.yaml b/chart/templates/worker/parquet/deployment.yaml index db27dd94..dccd619e 100644 --- a/chart/templates/worker/parquet/deployment.yaml +++ b/chart/templates/worker/parquet/deployment.yaml @@ -23,2 +22,0 @@ spec: - initContainers: - {{ include "initContainerCache" . | nindent 8 }} @@ -28 +25,0 @@ spec: - volumes: {{ include "volumeData" . 
| nindent 8 }} diff --git a/chart/values.yaml b/chart/values.yaml index f58427a1..b36dac06 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -251 +251 @@ firstRows: -parquet: +parquetAndDatasetInfo: @@ -277,0 +278,28 @@ parquet: + +parquet: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 + nodeSelector: {} + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 + tolerations: [] + +datasetInfo: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 + nodeSelector: {} + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 + tolerations: [] diff --git a/e2e/Makefile b/e2e/Makefile index 3afd578e..7b97af7c 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -8 +8 @@ export COMMON_HF_TOKEN := hf_app_datasets-server_token -export PARQUET_COMMITTER_HF_TOKEN := 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 +export PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN := 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 @@ -14,0 +15 @@ export API_UVICORN_NUM_WORKERS := 2 +export COMMON_LOG_LEVEL := DEBUG @@ -21 +21,0 @@ include ../tools/Python.mk -include ../tools/PythonAudit.mk diff --git a/e2e/poetry.lock b/e2e/poetry.lock index 27642a87..711adadb 100644 --- a/e2e/poetry.lock +++ b/e2e/poetry.lock @@ -3 +3 @@ name = "attrs" -version = "22.1.0" +version = "22.2.0" @@ -7 +7 @@ optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" @@ -10,4 +10,5 @@ python-versions = ">=3.5" -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] @@ -36 +37 @@ name = "black" -version = "22.10.0" +version = "22.12.0" @@ -83 +84 @@ name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" @@ -87,4 +88 @@ optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] +python-versions = "*" @@ -124 +122 @@ name = "cyclonedx-python-lib" -version = "3.1.1" +version = "3.1.5" @@ -138 +136 @@ name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" @@ -149 +147 @@ name = "filelock" -version = "3.8.1" +version = "3.9.0" @@ -156,2 +154,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout 
(>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -185 +183 @@ name = "gitpython" -version = "3.1.29" +version = "3.1.30" @@ -249,2 +247,2 @@ name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" @@ -253 +251 @@ optional = false -python-versions = "*" +python-versions = ">=3.7" @@ -257 +255 @@ name = "isort" -version = "5.10.1" +version = "5.11.4" @@ -261 +259 @@ optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" @@ -380 +378 @@ name = "packaging" -version = "21.3" +version = "23.0" @@ -384,4 +382 @@ optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" @@ -391 +386 @@ name = "pathspec" -version = "0.10.2" +version = "0.10.3" @@ -399 +394 @@ name = "pbr" -version = "5.11.0" +version = "5.11.1" @@ -426 +421 @@ name = "pip-audit" -version = "2.4.7" +version = "2.4.13" @@ -436 +431 @@ html5lib = ">=1.1" -packaging = ">=21.0.0" +packaging = ">=23.0.0" @@ -438 +433 @@ pip-api = ">=0.0.28" -pip-requirements-parser = ">=31.2.0" +pip-requirements-parser = ">=32.0.0" @@ -445 +440 @@ dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] +lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "pdoc3", "ruff (<0.0.218)", "types-html5lib", "types-requests", "types-toml"] @@ -450 +445 @@ name = "pip-requirements-parser" -version = "31.2.0" +version = "32.0.1" @@ -454 +449 @@ optional = false -python-versions = ">=3.6.*" +python-versions = ">=3.6.0" @@ -457,0 +453 @@ packaging = "*" +pyparsing = "*" @@ -461 +457 @@ docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] +testing = ["aboutcode-toolkit (>=6.0.0)", "black", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] @@ -465 +461 @@ name = "platformdirs" -version = "2.5.4" +version = "2.6.2" @@ -472,2 +468,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -516 +512 @@ name = "pygments" -version = "2.13.0" +version = "2.14.0" @@ -538 +534 @@ name = "pyrsistent" -version = "0.19.2" +version = "0.19.3" @@ -546 +542 @@ name = "pytest" -version = "7.2.0" +version = "7.2.1" @@ -574 +570 @@ name = "requests" -version = "2.28.1" +version = "2.28.2" @@ -582 +578 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" @@ -606 +602 @@ name = "rich" -version = "12.6.0" +version = "13.1.0" @@ -610 +606 @@ optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" @@ -621 +617 @@ name = "setuptools" -version = "65.6.3" +version = "66.0.0" @@ -628 +624 @@ python-versions = ">=3.7" -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker 
(>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -726 +722 @@ name = "urllib3" -version = "1.26.13" +version = "1.26.14" @@ -752,2 +748,2 @@ attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, @@ -760,21 +756,12 @@ black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = 
"black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, @@ -791,2 +778,88 @@ charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = 
"charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = 
"charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + 
{file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, @@ -807,2 +880,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, - {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, + {file = "cyclonedx_python_lib-3.1.5-py3-none-any.whl", hash = "sha256:8981ca462fba91469c268d684a03f72c89c7a807674d884f83a28d8c2822a9b6"}, + {file = "cyclonedx_python_lib-3.1.5.tar.gz", hash = 
"sha256:1ccd482024a30b95c4fffb3fe567a9df97b705f34c1075f8abde8537867600c3"}, @@ -811,2 +884,2 @@ exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, @@ -815,2 +888,2 @@ filelock = [ - {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, - {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, @@ -827,2 +900,2 @@ gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, @@ -843,2 +916,2 @@ iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -847,2 +920,2 @@ isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, @@ -957,2 +1030,2 @@ packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, @@ -961,2 +1034,2 @@ pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, + {file 
= "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, + {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, @@ -965,2 +1038,2 @@ pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, @@ -977,2 +1050,2 @@ pip-audit = [ - {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, - {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, + {file = "pip_audit-2.4.13-py3-none-any.whl", hash = "sha256:3ea2fc5c70bf335362d4d81a7bd1084787efac34929e422f79bd8cf8804da2e2"}, + {file = "pip_audit-2.4.13.tar.gz", hash = "sha256:e0c9fe070a16aefdbb9c4d43df6a0183bc951375a293f58264c5e80b5edb57d7"}, @@ -981,2 +1054,2 @@ pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, + {file = "pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3"}, + {file = "pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526"}, @@ -985,2 +1058,2 @@ platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, @@ -1005,2 +1078,2 @@ pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, @@ -1013,22 +1086,27 @@ pyrsistent = [ - {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, - {file = 
"pyrsistent-0.19.2-cp310-cp310-win32.whl", hash = "sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win32.whl", hash = "sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73"}, - {file = "pyrsistent-0.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win32.whl", hash = "sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab"}, - {file = "pyrsistent-0.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win32.whl", hash = "sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291"}, - {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, - {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = 
"sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + 
{file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, @@ -1037,2 +1115,2 @@ pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, + {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, @@ -1083,2 +1161,2 @@ requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, @@ -1091,2 +1169,2 @@ rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.1.0-py3-none-any.whl", hash = "sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"}, + {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"}, @@ -1095,2 +1173,2 @@ setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"}, + {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"}, @@ -1167,2 +1245,2 @@ urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, diff --git a/e2e/tests/test_11_auth.py b/e2e/tests/test_11_auth.py index 95bed754..4df95333 100644 --- a/e2e/tests/test_11_auth.py +++ b/e2e/tests/test_11_auth.py @@ -7,13 +7 @@ from .fixtures.hub import AuthHeaders, AuthType, DatasetRepos, DatasetReposType -from .utils import ( - Response, - get_default_config_split, - poll_first_rows, - poll_parquet, - poll_splits, - post_refresh, -) - - -def log(response: Response, dataset: str) -> str: - dataset, config, split = get_default_config_split(dataset) - return f"{response.status_code} - {response.text} - {dataset} - {config} - {split}" +from .utils import get_default_config_split, poll_until_ready_and_assert @@ -23 +11 @@ def log(response: Response, dataset: str) -> str: - 
"type,auth,webhook_status_code,response_status_code,error_code_splits,error_code_first_rows", + "type,auth,expected_status_code,expected_error_code", @@ -25,3 +13,3 @@ def log(response: Response, dataset: str) -> str: - ("public", "none", 200, 200, None, None), - ("public", "token", 200, 200, None, None), - ("public", "cookie", 200, 200, None, None), + ("public", "none", 200, None), + ("public", "token", 200, None), + ("public", "cookie", 200, None), @@ -30,3 +18,3 @@ def log(response: Response, dataset: str) -> str: - ("gated", "none", 200, 401, "ExternalUnauthenticatedError", "ExternalUnauthenticatedError"), - ("gated", "token", 200, 200, None, None), - ("gated", "cookie", 200, 200, None, None), + ("gated", "none", 401, "ExternalUnauthenticatedError"), + ("gated", "token", 200, None), + ("gated", "cookie", 200, None), @@ -35,3 +23,3 @@ def log(response: Response, dataset: str) -> str: - ("private", "none", 400, 401, "ExternalUnauthenticatedError", "ExternalUnauthenticatedError"), - ("private", "token", 400, 404, "ResponseNotFound", "ResponseNotFound"), - ("private", "cookie", 400, 404, "ResponseNotFound", "ResponseNotFound"), + ("private", "none", 401, "ExternalUnauthenticatedError"), + ("private", "token", 404, "ResponseNotFound"), + ("private", "cookie", 404, "ResponseNotFound"), @@ -40 +28 @@ def log(response: Response, dataset: str) -> str: -def test_split_public_auth( +def test_auth_e2e( @@ -45,4 +33,2 @@ def test_split_public_auth( - webhook_status_code: int, - response_status_code: int, - error_code_splits: str, - error_code_first_rows: str, + expected_status_code: int, + expected_error_code: str, @@ -49,0 +36 @@ def test_split_public_auth( + # TODO: add dataset with various splits, or various configs @@ -51,12 +38,17 @@ def test_split_public_auth( - r_webhook = post_refresh(dataset) - assert r_webhook.status_code == webhook_status_code, log(r_webhook, dataset) - r_splits = poll_splits(dataset, headers=auth_headers[auth]) - assert r_splits.status_code == response_status_code, log(r_splits, dataset) - assert r_splits.headers.get("X-Error-Code") == error_code_splits, log(r_splits, dataset) - r_rows = poll_first_rows(dataset, config, split, headers=auth_headers[auth]) - assert r_rows.status_code == response_status_code, log(r_rows, dataset) - assert r_rows.headers.get("X-Error-Code") == error_code_first_rows, log(r_rows, dataset) - r_parquet = poll_parquet(dataset, headers=auth_headers[auth]) - error_code_parquet = error_code_splits - assert r_parquet.status_code == response_status_code, log(r_parquet, dataset) - assert r_parquet.headers.get("X-Error-Code") == error_code_parquet, log(r_parquet, dataset) + headers = auth_headers[auth] + + # asking for the dataset will launch the jobs, without the need of a webhook + endpoints = [ + f"/splits?dataset={dataset}", + f"/first-rows?dataset={dataset}&config={config}&split={split}", + f"/parquet-and-dataset-info?dataset={dataset}", + f"/parquet?dataset={dataset}", + f"/dataset-info?dataset={dataset}", + ] + for endpoint in endpoints: + poll_until_ready_and_assert( + relative_url=endpoint, + expected_status_code=expected_status_code, + expected_error_code=expected_error_code, + headers=headers, + ) diff --git a/e2e/tests/utils.py b/e2e/tests/utils.py index ac6dd203..41e5fb4b 100644 --- a/e2e/tests/utils.py +++ b/e2e/tests/utils.py @@ -8 +8 @@ from pathlib import Path -from typing import Any, Mapping, Optional, Tuple +from typing import Any, Literal, Mapping, Optional, Tuple @@ -100,0 +101,39 @@ def get_default_config_split(dataset: str) -> 
Tuple[str, str, str]:
+def post_webhook(
+    dataset: str, event: Literal["add", "remove", "update", "move"], movedTo: Optional[str] = None
+) -> Response:
+    json: dict[str, Any] = {"event": event, "repo": {"type": "dataset", "name": dataset}}
+    if movedTo is not None:
+        json["movedTo"] = movedTo
+    return post("/webhook", json=json)
+
+
+def log(response: Response, url: str) -> str:
+    return f"{response.status_code} - {response.headers} - {response.text} - {url}"
+
+
+def poll_until_ready_and_assert(
+    relative_url: str,
+    expected_status_code: int,
+    expected_error_code: Optional[str],
+    headers: Headers = None,
+    url: str = URL,
+) -> None:
+    if headers is None:
+        headers = {}
+    interval = INTERVAL
+    timeout = MAX_DURATION
+    retries = timeout // interval
+    should_retry = True
+    response = None
+    while retries > 0 and should_retry:
+        retries -= 1
+        time.sleep(interval)
+        response = get(relative_url=relative_url, headers=headers, url=url)
+        print(response.headers.get("X-Error-Code"))
+        should_retry = response.headers.get("X-Error-Code") == "ResponseNotReady"
+    if should_retry or response is None:
+        raise RuntimeError("Poll timeout")
+    assert response.status_code == expected_status_code, log(response, url)
+    assert response.headers.get("X-Error-Code") == expected_error_code, log(response, url)
+
+
diff --git a/jobs/mongodb_migration/Makefile b/jobs/mongodb_migration/Makefile
index dbd4c0c6..71d907fd 100644
--- a/jobs/mongodb_migration/Makefile
+++ b/jobs/mongodb_migration/Makefile
@@ -11 +10,0 @@ include ../../tools/Python.mk
-include ../../tools/PythonAudit.mk
diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock
index b656e8a7..5377ab4d 100644
--- a/jobs/mongodb_migration/poetry.lock
+++ b/jobs/mongodb_migration/poetry.lock
@@ -11 +11 @@ name = "attrs"
-version = "22.1.0"
+version = "22.2.0"
@@ -15 +15 @@ optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
@@ -18,4 +18,5 @@ python-versions = ">=3.5"
-dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
-docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
-tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"]
+tests = ["attrs[tests-no-zope]", "zope.interface"]
+tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
@@ -44 +45 @@ name = "black"
-version = "22.10.0"
+version = "22.12.0"
@@ -91 +92 @@ name = "charset-normalizer"
-version = "2.1.1"
+version = "3.0.1"
@@ -95,4 +96 @@ optional = false
-python-versions = ">=3.6.0"
-
-[package.extras]
-unicode-backport = ["unicodedata2"]
+python-versions = "*"
@@ -132 +130 @@ name = "coverage"
-version = "6.5.0"
+version = "7.0.5"
@@ -143 +141 @@ name = 
"cyclonedx-python-lib" -version = "3.1.1" +version = "3.1.5" @@ -187 +185 @@ name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" @@ -198 +196 @@ name = "filelock" -version = "3.8.2" +version = "3.9.0" @@ -205,2 +203,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -234 +232 @@ name = "gitpython" -version = "3.1.29" +version = "3.1.30" @@ -298,2 +296,2 @@ name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" @@ -302 +300 @@ optional = false -python-versions = "*" +python-versions = ">=3.7" @@ -306 +304 @@ name = "isort" -version = "5.10.1" +version = "5.11.4" @@ -310 +308 @@ optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" @@ -320 +318 @@ name = "libcommon" -version = "0.6.0" +version = "0.6.4" @@ -338 +336 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" @@ -426 +424 @@ name = "orjson" -version = "3.8.3" +version = "3.8.5" @@ -446 +444 @@ name = "packaging" -version = "21.3" +version = "23.0" @@ -450,4 +448 @@ optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" @@ -457 +452 @@ name = "pathspec" -version = "0.10.2" +version = "0.10.3" @@ -465 +460 @@ name = "pbr" -version = "5.11.0" +version = "5.11.1" @@ -492 +487 @@ name = "pip-audit" -version = "2.4.7" +version = "2.4.13" @@ -502 +497 @@ html5lib = ">=1.1" -packaging = ">=21.0.0" +packaging = ">=23.0.0" @@ -504 +499 @@ pip-api = ">=0.0.28" -pip-requirements-parser = ">=31.2.0" +pip-requirements-parser = ">=32.0.0" @@ -511 +506 @@ dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] +lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "pdoc3", "ruff (<0.0.218)", "types-html5lib", "types-requests", "types-toml"] @@ -516 +511 @@ name = "pip-requirements-parser" -version = "31.2.0" +version = "32.0.1" @@ -520 +515 @@ optional = false -python-versions = ">=3.6.*" +python-versions = ">=3.6.0" @@ -523,0 +519 @@ packaging = "*" +pyparsing = "*" @@ -527 +523 @@ docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] +testing = ["aboutcode-toolkit (>=6.0.0)", "black", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] @@ -531 +527 @@ name = "platformdirs" -version = "2.5.4" +version = "2.6.2" @@ -538,2 +534,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -593 +589 @@ name = "pygments" -version 
= "2.13.0" +version = "2.14.0" @@ -627 +623 @@ description = "pyparsing module - Classes and methods to define and execute pars -category = "main" +category = "dev" @@ -636 +632 @@ name = "pytest" -version = "7.2.0" +version = "7.2.1" @@ -691 +687 @@ name = "requests" -version = "2.28.1" +version = "2.28.2" @@ -699 +695 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" @@ -723 +719 @@ name = "rich" -version = "12.6.0" +version = "13.1.0" @@ -727 +723 @@ optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" @@ -738 +734 @@ name = "setuptools" -version = "65.6.3" +version = "66.0.0" @@ -745 +741 @@ python-versions = ">=3.7" -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -843 +839 @@ name = "urllib3" -version = "1.26.13" +version = "1.26.14" @@ -865 +861 @@ python-versions = "3.9.15" -content-hash = "ac7869cb48f3730d996eb5a7aff9ef4ecaa9bd2a66804a9a4301aa8a6b27decf" +content-hash = "0bcebdd4352e192c5057e78bee0717aacfd5b091cb6cd6ad40edf2424df647e4" @@ -873,2 +869,2 @@ attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, @@ -881,21 +877,12 @@ black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = 
"black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, @@ -912,2 +899,88 @@ charset-normalizer = [ - {file = 
"charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = 
"charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = 
"charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = 
"charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, @@ -928,50 +1001,51 @@ coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash 
= "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = 
"coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809"}, + {file = "coverage-7.0.5-cp310-cp310-win32.whl", hash = "sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21"}, + {file = "coverage-7.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095"}, + {file = "coverage-7.0.5-cp311-cp311-win32.whl", hash = "sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831"}, + {file = "coverage-7.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea"}, + {file = "coverage-7.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47"}, + {file = "coverage-7.0.5-cp37-cp37m-win32.whl", hash = "sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882"}, + {file = "coverage-7.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499"}, + {file = "coverage-7.0.5-cp38-cp38-win32.whl", hash = "sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16"}, + {file = "coverage-7.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1"}, + {file = "coverage-7.0.5-cp39-cp39-win32.whl", hash = "sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904"}, + {file = "coverage-7.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f"}, + {file = "coverage-7.0.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0"}, + {file = "coverage-7.0.5.tar.gz", hash = "sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45"}, @@ -980,2 +1054,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, - {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, + {file = "cyclonedx_python_lib-3.1.5-py3-none-any.whl", hash = "sha256:8981ca462fba91469c268d684a03f72c89c7a807674d884f83a28d8c2822a9b6"}, + {file = "cyclonedx_python_lib-3.1.5.tar.gz", hash = "sha256:1ccd482024a30b95c4fffb3fe567a9df97b705f34c1075f8abde8537867600c3"}, @@ -992,2 +1066,2 @@ exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, @@ -996,2 +1070,2 @@ filelock = [ - {file = "filelock-3.8.2-py3-none-any.whl", hash = 
"sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"}, - {file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"}, + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, @@ -1008,2 +1082,2 @@ gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, @@ -1024,2 +1098,2 @@ iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1028,2 +1102,2 @@ isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, @@ -1032 +1106 @@ libcommon = [ - {file = "libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, + {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, @@ -1137,44 +1211,44 @@ orjson = [ - {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, - {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, - {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, - {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, - {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, - 
{file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, - {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, - {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, - {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:143639b9898b094883481fac37733231da1c2ae3aec78a1dd8d3b58c9c9fceef"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:31f43e63e0d94784c55e86bd376df3f80b574bea8c0bc5ecd8041009fa8ec78a"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c802ea6d4a0d40f096aceb5e7ef0a26c23d276cb9334e1cadcf256bb090b6426"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf298b55b371c2772420c5ace4d47b0a3ea1253667e20ded3c363160fd0575f6"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68cb4a8501a463771d55bb22fc72795ec7e21d71ab083e000a2c3b651b6fb2af"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4f1427952b3bd92bfb63a61b7ffc33a9f54ec6de296fa8d924cbeba089866acb"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0a9f329468c8eb000742455b83546849bcd69495d6baa6e171c7ee8600a47bd"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6535d527aa1e4a757a6ce9b61f3dd74edc762e7d2c6991643aae7c560c8440bd"}, + {file = "orjson-3.8.5-cp310-none-win_amd64.whl", hash = "sha256:2eee64c028adf6378dd714c8debc96d5b92b6bb4862debb65ca868e59bac6c63"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = 
"sha256:f5745ff473dd5c6718bf8c8d5bc183f638b4f3e03c7163ffcda4d4ef453f42ff"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:544f1240b295083697027a5093ec66763218ff16f03521d5020e7a436d2e417b"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85c9c6bab97a831e7741089057347d99901b4db2451a076ca8adedc7d96297f"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bae7347764e7be6dada980fd071e865544c98317ab61af575c9cc5e1dc7e3fe"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f6f6e9d26a06b63126112a7bc8d8529df048d31df2a257a8484b76adf3e5d"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:758238364142fcbeca34c968beefc0875ffa10aa2f797c82f51cfb1d22d0934e"}, + {file = "orjson-3.8.5-cp311-none-win_amd64.whl", hash = "sha256:cc7579240fb88a626956a6cb4a181a11b62afbc409ce239a7b866568a2412fa2"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:79aa3e47cbbd4eedbbde4f988f766d6cf38ccb51d52cfabfeb6b8d1b58654d25"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:2544cd0d089faa862f5a39f508ee667419e3f9e11f119a6b1505cfce0eb26601"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2be0025ca7e460bcacb250aba8ce0239be62957d58cf34045834cc9302611d3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b57bf72902d818506906e49c677a791f90dbd7f0997d60b14bc6c1ce4ce4cf9"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ae9832a11c6a9efa8c14224e5caf6e35046efd781de14e59eb69ab4e561cf3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:0e28330cc6d51741cad0edd1b57caf6c5531aff30afe41402acde0a03246b8ed"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:155954d725627b5480e6cc1ca488afb4fa685099a4ace5f5bf21a182fabf6706"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ece1b6ef9312df5d5274ca6786e613b7da7de816356e36bcad9ea8a73d15ab71"}, + {file = "orjson-3.8.5-cp37-none-win_amd64.whl", hash = "sha256:6f58d1f0702332496bc1e2d267c7326c851991b62cf6395370d59c47f9890007"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:933f4ab98362f46a59a6d0535986e1f0cae2f6b42435e24a55922b4bc872af0c"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:47a7ca236b25a138a74b2cb5169adcdc5b2b8abdf661de438ba65967a2cde9dc"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b573ca942c626fcf8a86be4f180b86b2498b18ae180f37b4180c2aced5808710"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9bab11611d5452efe4ae5315f5eb806f66104c08a089fb84c648d2e8e00f106"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee2f5f6476617d01ca166266d70fd5605d3397a41f067022ce04a2e1ced4c8d"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:ec0b0b6cd0b84f03537f22b719aca705b876c54ab5cf3471d551c9644127284f"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:df3287dc304c8c4556dc85c4ab89eb333307759c1863f95e72e555c0cfce3e01"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09f40add3c2d208e20f8bf185df38f992bf5092202d2d30eced8f6959963f1d5"}, + {file = "orjson-3.8.5-cp38-none-win_amd64.whl", hash = "sha256:232ec1df0d708f74e0dd1fccac1e9a7008cd120d48fe695e8f0c9d80771da430"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:8fba3e7aede3e88a01e94e6fe63d4580162b212e6da27ae85af50a1787e41416"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:85e22c358cab170c8604e9edfffcc45dd7b0027ce57ed6bcacb556e8bfbbb704"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeab1d8247507a75926adf3ca995c74e91f5db1f168815bf3e774f992ba52b50"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daaaef15a41e9e8cadc7677cefe00065ae10bce914eefe8da1cd26b3d063970b"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ccc9f52cf46bd353c6ae1153eaf9d18257ddc110d135198b0cd8718474685ce"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d48c182c7ff4ea0787806de8a2f9298ca44fd0068ecd5f23a4b2d8e03c745cb6"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1848e3b4cc09cc82a67262ae56e2a772b0548bb5a6f9dcaee10dcaaf0a5177b7"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38480031bc8add58effe802291e4abf7042ef72ae1a4302efe9a36c8f8bfbfcc"}, + {file = "orjson-3.8.5-cp39-none-win_amd64.whl", hash = "sha256:0e9a1c2e649cbaed410c882cedc8f3b993d8f1426d9327f31762d3f46fe7cc88"}, + {file = "orjson-3.8.5.tar.gz", hash = "sha256:77a3b2bd0c4ef7723ea09081e3329dac568a62463aed127c1501441b07ffc64b"}, @@ -1187,2 +1261,2 @@ packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, @@ -1191,2 +1265,2 @@ pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, + {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, + {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, @@ -1195,2 +1269,2 @@ pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, @@ -1207,2 +1281,2 @@ pip-audit = [ - {file = "pip_audit-2.4.7-py3-none-any.whl", hash = 
"sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, - {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, + {file = "pip_audit-2.4.13-py3-none-any.whl", hash = "sha256:3ea2fc5c70bf335362d4d81a7bd1084787efac34929e422f79bd8cf8804da2e2"}, + {file = "pip_audit-2.4.13.tar.gz", hash = "sha256:e0c9fe070a16aefdbb9c4d43df6a0183bc951375a293f58264c5e80b5edb57d7"}, @@ -1211,2 +1285,2 @@ pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, + {file = "pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3"}, + {file = "pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526"}, @@ -1215,2 +1289,2 @@ platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, @@ -1251,2 +1325,2 @@ pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, @@ -1370,2 +1444,2 @@ pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, + {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, @@ -1424,2 +1498,2 @@ requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, @@ -1432,2 +1506,2 @@ rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.1.0-py3-none-any.whl", hash = 
"sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"}, + {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"}, @@ -1436,2 +1510,2 @@ setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"}, + {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"}, @@ -1508,2 +1582,2 @@ urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index 3779d1f4..49dacf39 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } diff --git a/libs/libcommon/Makefile b/libs/libcommon/Makefile index 57583a9c..932267a9 100644 --- a/libs/libcommon/Makefile +++ b/libs/libcommon/Makefile @@ -10 +9,0 @@ include ../../tools/Python.mk -include ../../tools/PythonAudit.mk diff --git a/libs/libcommon/dist/libcommon-0.6.2-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.2-py3-none-any.whl new file mode 100644 index 00000000..8f4748dd Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.2-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.2.tar.gz b/libs/libcommon/dist/libcommon-0.6.2.tar.gz new file mode 100644 index 00000000..75a67342 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.2.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.6.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.3-py3-none-any.whl new file mode 100644 index 00000000..d655f2a3 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.3-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.3.tar.gz b/libs/libcommon/dist/libcommon-0.6.3.tar.gz new file mode 100644 index 00000000..05e8b2b7 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.3.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl new file mode 100644 index 00000000..281e35c4 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.4.tar.gz b/libs/libcommon/dist/libcommon-0.6.4.tar.gz new file mode 100644 index 00000000..ed1c82ac Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.4.tar.gz differ diff --git a/libs/libcommon/poetry.lock b/libs/libcommon/poetry.lock index befc10f7..d95eca95 100644 --- a/libs/libcommon/poetry.lock +++ 
b/libs/libcommon/poetry.lock @@ -11 +11 @@ name = "attrs" -version = "22.1.0" +version = "22.2.0" @@ -15 +15 @@ optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" @@ -18,4 +18,5 @@ python-versions = ">=3.5" -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] @@ -44 +45 @@ name = "black" -version = "22.10.0" +version = "22.12.0" @@ -91 +92 @@ name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" @@ -95,4 +96 @@ optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] +python-versions = "*" @@ -132 +130 @@ name = "coverage" -version = "6.5.0" +version = "7.0.5" @@ -143 +141 @@ name = "cyclonedx-python-lib" -version = "3.1.1" +version = "3.1.5" @@ -187 +185 @@ name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" @@ -198 +196 @@ name = "filelock" -version = "3.8.1" +version = "3.9.0" @@ -205,2 +203,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -234 +232 @@ name = "gitpython" -version = "3.1.29" +version = "3.1.30" @@ -298,2 +296,2 @@ name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" @@ -302 +300 @@ optional = false -python-versions = "*" +python-versions = ">=3.7" @@ -306 +304 @@ name = "isort" -version = "5.10.1" +version = "5.11.4" @@ -310 +308 @@ optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" @@ -404 +402 @@ name = "orjson" -version = "3.8.3" +version = "3.8.5" @@ -424 +422 @@ name = "packaging" -version = "21.3" +version = "23.0" @@ -428,4 +426 @@ optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" @@ -435 +430 @@ name = "pathspec" -version = "0.10.2" +version = "0.10.3" @@ -443 +438 @@ name = "pbr" -version = "5.11.0" +version = "5.11.1" @@ -470 +465 @@ name = "pip-audit" -version = "2.4.7" +version = "2.4.13" @@ -480 +475 @@ html5lib = ">=1.1" -packaging = 
">=21.0.0" +packaging = ">=23.0.0" @@ -482 +477 @@ pip-api = ">=0.0.28" -pip-requirements-parser = ">=31.2.0" +pip-requirements-parser = ">=32.0.0" @@ -489 +484 @@ dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] +lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "pdoc3", "ruff (<0.0.218)", "types-html5lib", "types-requests", "types-toml"] @@ -494 +489 @@ name = "pip-requirements-parser" -version = "31.2.0" +version = "32.0.1" @@ -498 +493 @@ optional = false -python-versions = ">=3.6.*" +python-versions = ">=3.6.0" @@ -501,0 +497 @@ packaging = "*" +pyparsing = "*" @@ -505 +501 @@ docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] +testing = ["aboutcode-toolkit (>=6.0.0)", "black", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] @@ -509 +505 @@ name = "platformdirs" -version = "2.5.4" +version = "2.6.2" @@ -516,2 +512,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -571 +567 @@ name = "pygments" -version = "2.13.0" +version = "2.14.0" @@ -605 +601 @@ description = "pyparsing module - Classes and methods to define and execute pars -category = "main" +category = "dev" @@ -614 +610 @@ name = "pytest" -version = "7.2.0" +version = "7.2.1" @@ -669 +665 @@ name = "requests" -version = "2.28.1" +version = "2.28.2" @@ -677 +673 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" @@ -701 +697 @@ name = "rich" -version = "12.6.0" +version = "13.1.0" @@ -705 +701 @@ optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" @@ -716 +712 @@ name = "setuptools" -version = "65.6.3" +version = "66.0.0" @@ -723 +719 @@ python-versions = ">=3.7" -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -829 +825 @@ name = "urllib3" -version = "1.26.13" +version = "1.26.14" @@ -859,2 +855,2 @@ attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, @@ -867,21 +863,12 @@ black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", 
hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = 
"black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, @@ -898,2 +885,88 @@ charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = 
"charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = 
"charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = 
"charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, @@ -914,50 +987,51 @@ coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2"}, + {file = 
"coverage-7.0.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809"}, + {file = "coverage-7.0.5-cp310-cp310-win32.whl", hash = "sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21"}, + {file = "coverage-7.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095"}, + {file = "coverage-7.0.5-cp311-cp311-win32.whl", hash = "sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831"}, + {file = "coverage-7.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea"}, + {file = "coverage-7.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2"}, + {file = 
"coverage-7.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47"}, + {file = "coverage-7.0.5-cp37-cp37m-win32.whl", hash = "sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882"}, + {file = "coverage-7.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499"}, + {file = "coverage-7.0.5-cp38-cp38-win32.whl", hash = "sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16"}, + {file = "coverage-7.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1"}, + {file = "coverage-7.0.5-cp39-cp39-win32.whl", hash = "sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904"}, + {file = "coverage-7.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f"}, + {file = 
"coverage-7.0.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0"}, + {file = "coverage-7.0.5.tar.gz", hash = "sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45"}, @@ -966,2 +1040,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, - {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, + {file = "cyclonedx_python_lib-3.1.5-py3-none-any.whl", hash = "sha256:8981ca462fba91469c268d684a03f72c89c7a807674d884f83a28d8c2822a9b6"}, + {file = "cyclonedx_python_lib-3.1.5.tar.gz", hash = "sha256:1ccd482024a30b95c4fffb3fe567a9df97b705f34c1075f8abde8537867600c3"}, @@ -978,2 +1052,2 @@ exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, @@ -982,2 +1056,2 @@ filelock = [ - {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, - {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, @@ -994,2 +1068,2 @@ gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, @@ -1010,2 +1084,2 @@ iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1014,2 +1088,2 @@ isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, @@ -1120,44 +1194,44 @@ orjson = [ - {file = 
"orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, - {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, - {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, - {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", 
hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, - {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, - {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, - {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, - {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:143639b9898b094883481fac37733231da1c2ae3aec78a1dd8d3b58c9c9fceef"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = 
"sha256:31f43e63e0d94784c55e86bd376df3f80b574bea8c0bc5ecd8041009fa8ec78a"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c802ea6d4a0d40f096aceb5e7ef0a26c23d276cb9334e1cadcf256bb090b6426"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf298b55b371c2772420c5ace4d47b0a3ea1253667e20ded3c363160fd0575f6"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68cb4a8501a463771d55bb22fc72795ec7e21d71ab083e000a2c3b651b6fb2af"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4f1427952b3bd92bfb63a61b7ffc33a9f54ec6de296fa8d924cbeba089866acb"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0a9f329468c8eb000742455b83546849bcd69495d6baa6e171c7ee8600a47bd"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6535d527aa1e4a757a6ce9b61f3dd74edc762e7d2c6991643aae7c560c8440bd"}, + {file = "orjson-3.8.5-cp310-none-win_amd64.whl", hash = "sha256:2eee64c028adf6378dd714c8debc96d5b92b6bb4862debb65ca868e59bac6c63"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f5745ff473dd5c6718bf8c8d5bc183f638b4f3e03c7163ffcda4d4ef453f42ff"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:544f1240b295083697027a5093ec66763218ff16f03521d5020e7a436d2e417b"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85c9c6bab97a831e7741089057347d99901b4db2451a076ca8adedc7d96297f"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bae7347764e7be6dada980fd071e865544c98317ab61af575c9cc5e1dc7e3fe"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f6f6e9d26a06b63126112a7bc8d8529df048d31df2a257a8484b76adf3e5d"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:758238364142fcbeca34c968beefc0875ffa10aa2f797c82f51cfb1d22d0934e"}, + {file = "orjson-3.8.5-cp311-none-win_amd64.whl", hash = "sha256:cc7579240fb88a626956a6cb4a181a11b62afbc409ce239a7b866568a2412fa2"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:79aa3e47cbbd4eedbbde4f988f766d6cf38ccb51d52cfabfeb6b8d1b58654d25"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:2544cd0d089faa862f5a39f508ee667419e3f9e11f119a6b1505cfce0eb26601"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2be0025ca7e460bcacb250aba8ce0239be62957d58cf34045834cc9302611d3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b57bf72902d818506906e49c677a791f90dbd7f0997d60b14bc6c1ce4ce4cf9"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ae9832a11c6a9efa8c14224e5caf6e35046efd781de14e59eb69ab4e561cf3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:0e28330cc6d51741cad0edd1b57caf6c5531aff30afe41402acde0a03246b8ed"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:155954d725627b5480e6cc1ca488afb4fa685099a4ace5f5bf21a182fabf6706"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ece1b6ef9312df5d5274ca6786e613b7da7de816356e36bcad9ea8a73d15ab71"}, + {file = 
"orjson-3.8.5-cp37-none-win_amd64.whl", hash = "sha256:6f58d1f0702332496bc1e2d267c7326c851991b62cf6395370d59c47f9890007"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:933f4ab98362f46a59a6d0535986e1f0cae2f6b42435e24a55922b4bc872af0c"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:47a7ca236b25a138a74b2cb5169adcdc5b2b8abdf661de438ba65967a2cde9dc"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b573ca942c626fcf8a86be4f180b86b2498b18ae180f37b4180c2aced5808710"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9bab11611d5452efe4ae5315f5eb806f66104c08a089fb84c648d2e8e00f106"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee2f5f6476617d01ca166266d70fd5605d3397a41f067022ce04a2e1ced4c8d"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:ec0b0b6cd0b84f03537f22b719aca705b876c54ab5cf3471d551c9644127284f"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:df3287dc304c8c4556dc85c4ab89eb333307759c1863f95e72e555c0cfce3e01"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09f40add3c2d208e20f8bf185df38f992bf5092202d2d30eced8f6959963f1d5"}, + {file = "orjson-3.8.5-cp38-none-win_amd64.whl", hash = "sha256:232ec1df0d708f74e0dd1fccac1e9a7008cd120d48fe695e8f0c9d80771da430"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:8fba3e7aede3e88a01e94e6fe63d4580162b212e6da27ae85af50a1787e41416"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:85e22c358cab170c8604e9edfffcc45dd7b0027ce57ed6bcacb556e8bfbbb704"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeab1d8247507a75926adf3ca995c74e91f5db1f168815bf3e774f992ba52b50"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daaaef15a41e9e8cadc7677cefe00065ae10bce914eefe8da1cd26b3d063970b"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ccc9f52cf46bd353c6ae1153eaf9d18257ddc110d135198b0cd8718474685ce"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d48c182c7ff4ea0787806de8a2f9298ca44fd0068ecd5f23a4b2d8e03c745cb6"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1848e3b4cc09cc82a67262ae56e2a772b0548bb5a6f9dcaee10dcaaf0a5177b7"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38480031bc8add58effe802291e4abf7042ef72ae1a4302efe9a36c8f8bfbfcc"}, + {file = "orjson-3.8.5-cp39-none-win_amd64.whl", hash = "sha256:0e9a1c2e649cbaed410c882cedc8f3b993d8f1426d9327f31762d3f46fe7cc88"}, + {file = "orjson-3.8.5.tar.gz", hash = "sha256:77a3b2bd0c4ef7723ea09081e3329dac568a62463aed127c1501441b07ffc64b"}, @@ -1170,2 +1244,2 @@ packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, @@ 
-1174,2 +1248,2 @@ pathspec = [
- {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"},
- {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"},
+ {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"},
+ {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"},
@@ -1178,2 +1252,2 @@ pbr = [
- {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"},
- {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"},
+ {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"},
+ {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"},
@@ -1190,2 +1264,2 @@ pip-audit = [
- {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"},
- {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"},
+ {file = "pip_audit-2.4.13-py3-none-any.whl", hash = "sha256:3ea2fc5c70bf335362d4d81a7bd1084787efac34929e422f79bd8cf8804da2e2"},
+ {file = "pip_audit-2.4.13.tar.gz", hash = "sha256:e0c9fe070a16aefdbb9c4d43df6a0183bc951375a293f58264c5e80b5edb57d7"},
@@ -1194,2 +1268,2 @@ pip-requirements-parser = [
- {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"},
- {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"},
+ {file = "pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3"},
+ {file = "pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526"},
@@ -1198,2 +1272,2 @@ platformdirs = [
- {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"},
- {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"},
+ {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"},
+ {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"},
@@ -1234,2 +1308,2 @@ pygments = [
- {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"},
- {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"},
+ {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"},
+ {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"},
@@ -1353,2 +1427,2 @@ pytest = [
- {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"},
- {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"},
+ {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"},
+ {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"},
@@ -1407,2 +1481,2 @@ requests = [
- {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
- {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
+ {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
+ {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
@@ -1415,2 +1489,2 @@ rich = [
- {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"},
- {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"},
+ {file = "rich-13.1.0-py3-none-any.whl", hash = "sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"},
+ {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"},
@@ -1419,2 +1493,2 @@ setuptools = [
- {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"},
- {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"},
+ {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"},
+ {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"},
@@ -1495,2 +1569,2 @@ urllib3 = [
- {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"},
- {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"},
+ {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"},
+ {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"},
diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml
index 7da0cfe0..54523410 100644
--- a/libs/libcommon/pyproject.toml
+++ b/libs/libcommon/pyproject.toml
@@ -5 +5 @@ name = "libcommon"
-version = "0.6.1"
+version = "0.6.4"
diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py
index e6ab02f1..084f1d12 100644
--- a/libs/libcommon/src/libcommon/config.py
+++ b/libs/libcommon/src/libcommon/config.py
@@ -147 +146,0 @@ class ProcessingGraphConfig:
- "/parquet": {"input_type": "dataset"},
@@ -148,0 +148,3 @@ class ProcessingGraphConfig:
+ "/parquet-and-dataset-info": {"input_type": "dataset"},
+ "/parquet": {"input_type": "dataset", "requires": "/parquet-and-dataset-info"},
+ "/dataset-info": {"input_type": "dataset", "requires": "/parquet-and-dataset-info"},
diff --git a/libs/libcommon/tests/conftest.py b/libs/libcommon/tests/conftest.py
index d1d4199b..154c57e8 100644
--- a/libs/libcommon/tests/conftest.py
+++ b/libs/libcommon/tests/conftest.py
@@ -8 +7,0 @@ from libcommon.config import CacheConfig, QueueConfig
-from libcommon.processing_graph import ProcessingStep
@@ -14 +13 @@ env = Env(expand_vars=True)
-@fixture(scope="session", autouse=True) +@fixture() @@ -22 +21 @@ def cache_config() -> CacheConfig: -@fixture(scope="session", autouse=True) +@fixture() @@ -28,13 +26,0 @@ def queue_config() -> QueueConfig: - - -@fixture(scope="session") -def test_processing_step() -> ProcessingStep: - return ProcessingStep( - endpoint="/dummy", - input_type="dataset", - requires=None, - required_by_dataset_viewer=False, - parent=None, - ancestors=[], - children=[], - ) diff --git a/libs/libcommon/tests/test_processing_steps.py b/libs/libcommon/tests/test_processing_steps.py index ef50121e..6123f99a 100644 --- a/libs/libcommon/tests/test_processing_steps.py +++ b/libs/libcommon/tests/test_processing_steps.py @@ -12,0 +13 @@ def test_default_graph(): + parquet_and_dataset_info = graph.get_step("/parquet-and-dataset-info") @@ -13,0 +15 @@ def test_default_graph(): + dataset_info = graph.get_step("/dataset-info") @@ -16,0 +19 @@ def test_default_graph(): + assert parquet_and_dataset_info is not None @@ -17,0 +21 @@ def test_default_graph(): + assert dataset_info is not None @@ -21 +25,3 @@ def test_default_graph(): - assert parquet.parent is None + assert parquet_and_dataset_info.parent is None + assert parquet.parent is parquet_and_dataset_info + assert dataset_info.parent is parquet_and_dataset_info @@ -24,0 +31 @@ def test_default_graph(): + assert parquet_and_dataset_info.children == [parquet, dataset_info] @@ -25,0 +33 @@ def test_default_graph(): + assert dataset_info.children == [] @@ -29 +37,3 @@ def test_default_graph(): - assert parquet.get_ancestors() == [] + assert parquet_and_dataset_info.get_ancestors() == [] + assert parquet.get_ancestors() == [parquet_and_dataset_info] + assert dataset_info.get_ancestors() == [parquet_and_dataset_info] @@ -31 +41 @@ def test_default_graph(): - assert graph.get_first_steps() == [splits, parquet] + assert graph.get_first_steps() == [splits, parquet_and_dataset_info] diff --git a/services/admin/Makefile b/services/admin/Makefile index 5f06b08a..1482c281 100644 --- a/services/admin/Makefile +++ b/services/admin/Makefile @@ -10,2 +9,0 @@ include ../../tools/Python.mk -include ../../tools/PythonAudit.mk -#include ../../tools/PythonTest.mk diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index d575e00a..5081f356 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -28 +28 @@ name = "asgiref" -version = "3.5.2" +version = "3.6.0" @@ -39 +39 @@ name = "attrs" -version = "22.1.0" +version = "22.2.0" @@ -43 +43 @@ optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" @@ -46,4 +46,5 @@ python-versions = ">=3.5" -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = 
["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] @@ -72 +73 @@ name = "black" -version = "22.10.0" +version = "22.12.0" @@ -119 +120 @@ name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" @@ -123,4 +124 @@ optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] +python-versions = "*" @@ -160 +158 @@ name = "coverage" -version = "6.5.0" +version = "7.0.5" @@ -171 +169 @@ name = "cyclonedx-python-lib" -version = "3.1.1" +version = "3.1.5" @@ -215 +213 @@ name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" @@ -226 +224 @@ name = "filelock" -version = "3.8.1" +version = "3.9.0" @@ -233,2 +231,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -262 +260 @@ name = "gitpython" -version = "3.1.29" +version = "3.1.30" @@ -334,2 +332,2 @@ name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" @@ -338 +336 @@ optional = false -python-versions = "*" +python-versions = ">=3.7" @@ -342 +340 @@ name = "isort" -version = "5.10.1" +version = "5.11.4" @@ -346 +344 @@ optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" @@ -356 +354 @@ name = "libcommon" -version = "0.6.0" +version = "0.6.4" @@ -374 +372 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" @@ -462 +460 @@ name = "orjson" -version = "3.8.3" +version = "3.8.5" @@ -482 +480 @@ name = "packaging" -version = "21.3" +version = "23.0" @@ -486,4 +484 @@ optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" @@ -493 +488 @@ name = "pathspec" -version = "0.10.2" +version = "0.10.3" @@ -501 +496 @@ name = "pbr" -version = "5.11.0" +version = "5.11.1" @@ -528 +523 @@ name = "pip-audit" -version = "2.4.7" +version = "2.4.13" @@ -538 +533 @@ html5lib = ">=1.1" -packaging = ">=21.0.0" +packaging = ">=23.0.0" @@ -540 +535 @@ pip-api = ">=0.0.28" -pip-requirements-parser = ">=31.2.0" +pip-requirements-parser = ">=32.0.0" @@ -547 +542 @@ dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] +lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "pdoc3", "ruff (<0.0.218)", "types-html5lib", "types-requests", "types-toml"] @@ -552 +547 @@ name = "pip-requirements-parser" -version = "31.2.0" +version = "32.0.1" @@ -556 +551 @@ optional = false -python-versions = ">=3.6.*" +python-versions = ">=3.6.0" @@ -559,0 +555 @@ packaging = "*" +pyparsing = "*" @@ -563 +559 @@ docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] +testing = ["aboutcode-toolkit 
(>=6.0.0)", "black", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] @@ -567 +563 @@ name = "platformdirs" -version = "2.5.4" +version = "2.6.2" @@ -574,2 +570,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -640 +636 @@ name = "pygments" -version = "2.13.0" +version = "2.14.0" @@ -674 +670 @@ description = "pyparsing module - Classes and methods to define and execute pars -category = "main" +category = "dev" @@ -683 +679 @@ name = "pytest" -version = "7.2.0" +version = "7.2.1" @@ -738 +734 @@ name = "requests" -version = "2.28.1" +version = "2.28.2" @@ -746 +742 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" @@ -785 +781 @@ name = "rich" -version = "12.6.0" +version = "13.1.0" @@ -789 +785 @@ optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" @@ -800 +796 @@ name = "setuptools" -version = "65.6.3" +version = "66.0.0" @@ -807 +803 @@ python-versions = ">=3.7" -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -947 +943 @@ name = "urllib3" -version = "1.26.13" +version = "1.26.14" @@ -976 +972 @@ name = "watchdog" -version = "2.1.9" +version = "2.2.1" @@ -999 +995 @@ python-versions = "3.9.15" -content-hash = "24b868926cddaabeffd1ebc8940dc3988ca9b7e0c56145624162839bd2997f5f" +content-hash = "d4bd5baac88d3873fbf416c00ccdc8f224f4941ea0e81e176af32981b9128913" @@ -1011,2 +1007,2 @@ asgiref = [ - {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, - {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, + {file = "asgiref-3.6.0-py3-none-any.whl", hash = "sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac"}, + {file = "asgiref-3.6.0.tar.gz", hash = "sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506"}, @@ -1015,2 +1011,2 @@ attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, @@ -1023,21 +1019,12 @@ black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - 
{file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, @@ -1054,2 +1041,88 @@ charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = 
"charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = 
"charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, @@ -1070,50 +1143,51 @@ coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2"}, + {file = 
"coverage-7.0.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809"}, + {file = "coverage-7.0.5-cp310-cp310-win32.whl", hash = "sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21"}, + {file = "coverage-7.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095"}, + {file = "coverage-7.0.5-cp311-cp311-win32.whl", hash = "sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831"}, + {file = "coverage-7.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea"}, + {file = "coverage-7.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2"}, + {file = 
"coverage-7.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47"}, + {file = "coverage-7.0.5-cp37-cp37m-win32.whl", hash = "sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882"}, + {file = "coverage-7.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499"}, + {file = "coverage-7.0.5-cp38-cp38-win32.whl", hash = "sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16"}, + {file = "coverage-7.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1"}, + {file = "coverage-7.0.5-cp39-cp39-win32.whl", hash = "sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904"}, + {file = "coverage-7.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f"}, + {file = 
"coverage-7.0.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0"}, + {file = "coverage-7.0.5.tar.gz", hash = "sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45"}, @@ -1122,2 +1196,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, - {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, + {file = "cyclonedx_python_lib-3.1.5-py3-none-any.whl", hash = "sha256:8981ca462fba91469c268d684a03f72c89c7a807674d884f83a28d8c2822a9b6"}, + {file = "cyclonedx_python_lib-3.1.5.tar.gz", hash = "sha256:1ccd482024a30b95c4fffb3fe567a9df97b705f34c1075f8abde8537867600c3"}, @@ -1134,2 +1208,2 @@ exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, @@ -1138,2 +1212,2 @@ filelock = [ - {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, - {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, @@ -1150,2 +1224,2 @@ gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, @@ -1170,2 +1244,2 @@ iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1174,2 +1248,2 @@ isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, @@ -1178 +1252 @@ libcommon = [ - {file = 
"libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, + {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, @@ -1283,44 +1357,44 @@ orjson = [ - {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, - {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, - {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, - {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, - {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, - {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, - {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, - {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, + 
{file = "orjson-3.8.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:143639b9898b094883481fac37733231da1c2ae3aec78a1dd8d3b58c9c9fceef"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:31f43e63e0d94784c55e86bd376df3f80b574bea8c0bc5ecd8041009fa8ec78a"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c802ea6d4a0d40f096aceb5e7ef0a26c23d276cb9334e1cadcf256bb090b6426"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf298b55b371c2772420c5ace4d47b0a3ea1253667e20ded3c363160fd0575f6"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68cb4a8501a463771d55bb22fc72795ec7e21d71ab083e000a2c3b651b6fb2af"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4f1427952b3bd92bfb63a61b7ffc33a9f54ec6de296fa8d924cbeba089866acb"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0a9f329468c8eb000742455b83546849bcd69495d6baa6e171c7ee8600a47bd"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6535d527aa1e4a757a6ce9b61f3dd74edc762e7d2c6991643aae7c560c8440bd"}, + {file = "orjson-3.8.5-cp310-none-win_amd64.whl", hash = "sha256:2eee64c028adf6378dd714c8debc96d5b92b6bb4862debb65ca868e59bac6c63"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f5745ff473dd5c6718bf8c8d5bc183f638b4f3e03c7163ffcda4d4ef453f42ff"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:544f1240b295083697027a5093ec66763218ff16f03521d5020e7a436d2e417b"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85c9c6bab97a831e7741089057347d99901b4db2451a076ca8adedc7d96297f"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bae7347764e7be6dada980fd071e865544c98317ab61af575c9cc5e1dc7e3fe"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f6f6e9d26a06b63126112a7bc8d8529df048d31df2a257a8484b76adf3e5d"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:758238364142fcbeca34c968beefc0875ffa10aa2f797c82f51cfb1d22d0934e"}, + {file = "orjson-3.8.5-cp311-none-win_amd64.whl", hash = "sha256:cc7579240fb88a626956a6cb4a181a11b62afbc409ce239a7b866568a2412fa2"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:79aa3e47cbbd4eedbbde4f988f766d6cf38ccb51d52cfabfeb6b8d1b58654d25"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:2544cd0d089faa862f5a39f508ee667419e3f9e11f119a6b1505cfce0eb26601"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2be0025ca7e460bcacb250aba8ce0239be62957d58cf34045834cc9302611d3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b57bf72902d818506906e49c677a791f90dbd7f0997d60b14bc6c1ce4ce4cf9"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ae9832a11c6a9efa8c14224e5caf6e35046efd781de14e59eb69ab4e561cf3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:0e28330cc6d51741cad0edd1b57caf6c5531aff30afe41402acde0a03246b8ed"}, + {file = 
"orjson-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:155954d725627b5480e6cc1ca488afb4fa685099a4ace5f5bf21a182fabf6706"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ece1b6ef9312df5d5274ca6786e613b7da7de816356e36bcad9ea8a73d15ab71"}, + {file = "orjson-3.8.5-cp37-none-win_amd64.whl", hash = "sha256:6f58d1f0702332496bc1e2d267c7326c851991b62cf6395370d59c47f9890007"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:933f4ab98362f46a59a6d0535986e1f0cae2f6b42435e24a55922b4bc872af0c"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:47a7ca236b25a138a74b2cb5169adcdc5b2b8abdf661de438ba65967a2cde9dc"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b573ca942c626fcf8a86be4f180b86b2498b18ae180f37b4180c2aced5808710"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9bab11611d5452efe4ae5315f5eb806f66104c08a089fb84c648d2e8e00f106"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee2f5f6476617d01ca166266d70fd5605d3397a41f067022ce04a2e1ced4c8d"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:ec0b0b6cd0b84f03537f22b719aca705b876c54ab5cf3471d551c9644127284f"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:df3287dc304c8c4556dc85c4ab89eb333307759c1863f95e72e555c0cfce3e01"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09f40add3c2d208e20f8bf185df38f992bf5092202d2d30eced8f6959963f1d5"}, + {file = "orjson-3.8.5-cp38-none-win_amd64.whl", hash = "sha256:232ec1df0d708f74e0dd1fccac1e9a7008cd120d48fe695e8f0c9d80771da430"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:8fba3e7aede3e88a01e94e6fe63d4580162b212e6da27ae85af50a1787e41416"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:85e22c358cab170c8604e9edfffcc45dd7b0027ce57ed6bcacb556e8bfbbb704"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeab1d8247507a75926adf3ca995c74e91f5db1f168815bf3e774f992ba52b50"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daaaef15a41e9e8cadc7677cefe00065ae10bce914eefe8da1cd26b3d063970b"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ccc9f52cf46bd353c6ae1153eaf9d18257ddc110d135198b0cd8718474685ce"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d48c182c7ff4ea0787806de8a2f9298ca44fd0068ecd5f23a4b2d8e03c745cb6"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1848e3b4cc09cc82a67262ae56e2a772b0548bb5a6f9dcaee10dcaaf0a5177b7"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38480031bc8add58effe802291e4abf7042ef72ae1a4302efe9a36c8f8bfbfcc"}, + {file = "orjson-3.8.5-cp39-none-win_amd64.whl", hash = "sha256:0e9a1c2e649cbaed410c882cedc8f3b993d8f1426d9327f31762d3f46fe7cc88"}, + {file = "orjson-3.8.5.tar.gz", hash = "sha256:77a3b2bd0c4ef7723ea09081e3329dac568a62463aed127c1501441b07ffc64b"}, @@ -1333,2 +1407,2 @@ packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = 
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, @@ -1337,2 +1411,2 @@ pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, + {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, + {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, @@ -1341,2 +1415,2 @@ pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, @@ -1353,2 +1427,2 @@ pip-audit = [ - {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, - {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, + {file = "pip_audit-2.4.13-py3-none-any.whl", hash = "sha256:3ea2fc5c70bf335362d4d81a7bd1084787efac34929e422f79bd8cf8804da2e2"}, + {file = "pip_audit-2.4.13.tar.gz", hash = "sha256:e0c9fe070a16aefdbb9c4d43df6a0183bc951375a293f58264c5e80b5edb57d7"}, @@ -1357,2 +1431,2 @@ pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, + {file = "pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3"}, + {file = "pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526"}, @@ -1361,2 +1435,2 @@ platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, @@ -1401,2 +1475,2 @@ pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = 
"sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, @@ -1520,2 +1594,2 @@ pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, + {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, @@ -1574,2 +1648,2 @@ requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, @@ -1586,2 +1660,2 @@ rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.1.0-py3-none-any.whl", hash = "sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"}, + {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"}, @@ -1590,2 +1664,2 @@ setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"}, + {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"}, @@ -1678,2 +1752,2 @@ urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, @@ -1686,25 +1760,28 @@ watchdog = [ - {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a735a990a1095f75ca4f36ea2ef2752c99e6ee997c46b0de507ba40a09bf7330"}, - {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b17d302850c8d412784d9246cfe8d7e3af6bcd45f958abb2d08a6f8bedf695d"}, - {file = "watchdog-2.1.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee3e38a6cc050a8830089f79cbec8a3878ec2fe5160cdb2dc8ccb6def8552658"}, - {file = "watchdog-2.1.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64a27aed691408a6abd83394b38503e8176f69031ca25d64131d8d640a307591"}, - {file = "watchdog-2.1.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:195fc70c6e41237362ba720e9aaf394f8178bfc7fa68207f112d108edef1af33"}, - {file = "watchdog-2.1.9-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:bfc4d351e6348d6ec51df007432e6fe80adb53fd41183716017026af03427846"}, - {file = "watchdog-2.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8250546a98388cbc00c3ee3cc5cf96799b5a595270dfcfa855491a64b86ef8c3"}, - {file = "watchdog-2.1.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:117ffc6ec261639a0209a3252546b12800670d4bf5f84fbd355957a0595fe654"}, - {file = "watchdog-2.1.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:97f9752208f5154e9e7b76acc8c4f5a58801b338de2af14e7e181ee3b28a5d39"}, - {file = "watchdog-2.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:247dcf1df956daa24828bfea5a138d0e7a7c98b1a47cf1fa5b0c3c16241fcbb7"}, - {file = "watchdog-2.1.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:226b3c6c468ce72051a4c15a4cc2ef317c32590d82ba0b330403cafd98a62cfd"}, - {file = "watchdog-2.1.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d9820fe47c20c13e3c9dd544d3706a2a26c02b2b43c993b62fcd8011bcc0adb3"}, - {file = "watchdog-2.1.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:70af927aa1613ded6a68089a9262a009fbdf819f46d09c1a908d4b36e1ba2b2d"}, - {file = "watchdog-2.1.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ed80a1628cee19f5cfc6bb74e173f1b4189eb532e705e2a13e3250312a62e0c9"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9f05a5f7c12452f6a27203f76779ae3f46fa30f1dd833037ea8cbc2887c60213"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_armv7l.whl", hash = "sha256:255bb5758f7e89b1a13c05a5bceccec2219f8995a3a4c4d6968fe1de6a3b2892"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_i686.whl", hash = "sha256:d3dda00aca282b26194bdd0adec21e4c21e916956d972369359ba63ade616153"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_ppc64.whl", hash = "sha256:186f6c55abc5e03872ae14c2f294a153ec7292f807af99f57611acc8caa75306"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:083171652584e1b8829581f965b9b7723ca5f9a2cd7e20271edf264cfd7c1412"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_s390x.whl", hash = "sha256:b530ae007a5f5d50b7fbba96634c7ee21abec70dc3e7f0233339c81943848dc1"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:4f4e1c4aa54fb86316a62a87b3378c025e228178d55481d30d857c6c438897d6"}, - {file = "watchdog-2.1.9-py3-none-win32.whl", hash = "sha256:5952135968519e2447a01875a6f5fc8c03190b24d14ee52b0f4b1682259520b1"}, - {file = "watchdog-2.1.9-py3-none-win_amd64.whl", hash = "sha256:7a833211f49143c3d336729b0020ffd1274078e94b0ae42e22f596999f50279c"}, - {file = "watchdog-2.1.9-py3-none-win_ia64.whl", hash = "sha256:ad576a565260d8f99d97f2e64b0f97a48228317095908568a9d5c786c829d428"}, - {file = "watchdog-2.1.9.tar.gz", hash = "sha256:43ce20ebb36a51f21fa376f76d1d4692452b2527ccd601950d69ed36b9e21609"}, + {file = "watchdog-2.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a09483249d25cbdb4c268e020cb861c51baab2d1affd9a6affc68ffe6a231260"}, + {file = "watchdog-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5100eae58133355d3ca6c1083a33b81355c4f452afa474c2633bd2fbbba398b3"}, + {file = "watchdog-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e618a4863726bc7a3c64f95c218437f3349fb9d909eb9ea3a1ed3b567417c661"}, + {file = "watchdog-2.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:102a60093090fc3ff76c983367b19849b7cc24ec414a43c0333680106e62aae1"}, + {file = "watchdog-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:748ca797ff59962e83cc8e4b233f87113f3cf247c23e6be58b8a2885c7337aa3"}, + {file = 
"watchdog-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ccd8d84b9490a82b51b230740468116b8205822ea5fdc700a553d92661253a3"}, + {file = "watchdog-2.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6e01d699cd260d59b84da6bda019dce0a3353e3fcc774408ae767fe88ee096b7"}, + {file = "watchdog-2.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8586d98c494690482c963ffb24c49bf9c8c2fe0589cec4dc2f753b78d1ec301d"}, + {file = "watchdog-2.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:adaf2ece15f3afa33a6b45f76b333a7da9256e1360003032524d61bdb4c422ae"}, + {file = "watchdog-2.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83a7cead445008e880dbde833cb9e5cc7b9a0958edb697a96b936621975f15b9"}, + {file = "watchdog-2.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8ac23ff2c2df4471a61af6490f847633024e5aa120567e08d07af5718c9d092"}, + {file = "watchdog-2.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d0f29fd9f3f149a5277929de33b4f121a04cf84bb494634707cfa8ea8ae106a8"}, + {file = "watchdog-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:967636031fa4c4955f0f3f22da3c5c418aa65d50908d31b73b3b3ffd66d60640"}, + {file = "watchdog-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96cbeb494e6cbe3ae6aacc430e678ce4b4dd3ae5125035f72b6eb4e5e9eb4f4e"}, + {file = "watchdog-2.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61fdb8e9c57baf625e27e1420e7ca17f7d2023929cd0065eb79c83da1dfbeacd"}, + {file = "watchdog-2.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb5ecc332112017fbdb19ede78d92e29a8165c46b68a0b8ccbd0a154f196d5e"}, + {file = "watchdog-2.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a480d122740debf0afac4ddd583c6c0bb519c24f817b42ed6f850e2f6f9d64a8"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:978a1aed55de0b807913b7482d09943b23a2d634040b112bdf31811a422f6344"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:8c28c23972ec9c524967895ccb1954bc6f6d4a557d36e681a36e84368660c4ce"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_i686.whl", hash = "sha256:c27d8c1535fd4474e40a4b5e01f4ba6720bac58e6751c667895cbc5c8a7af33c"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d6b87477752bd86ac5392ecb9eeed92b416898c30bd40c7e2dd03c3146105646"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:cece1aa596027ff56369f0b50a9de209920e1df9ac6d02c7f9e5d8162eb4f02b"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:8b5cde14e5c72b2df5d074774bdff69e9b55da77e102a91f36ef26ca35f9819c"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e038be858425c4f621900b8ff1a3a1330d9edcfeaa1c0468aeb7e330fb87693e"}, + {file = "watchdog-2.2.1-py3-none-win32.whl", hash = "sha256:bc43c1b24d2f86b6e1cc15f68635a959388219426109233e606517ff7d0a5a73"}, + {file = "watchdog-2.2.1-py3-none-win_amd64.whl", hash = "sha256:17f1708f7410af92ddf591e94ae71a27a13974559e72f7e9fde3ec174b26ba2e"}, + {file = "watchdog-2.2.1-py3-none-win_ia64.whl", hash = "sha256:195ab1d9d611a4c1e5311cbf42273bc541e18ea8c32712f2fb703cfc6ff006f9"}, + {file = "watchdog-2.2.1.tar.gz", hash = "sha256:cdcc23c9528601a8a293eb4369cbd14f6b4f34f07ae8769421252e9c22718b6f"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index fcb01148..496ad15c 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = 
"../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } diff --git a/services/api/Makefile b/services/api/Makefile index f7c14367..98ba5614 100644 --- a/services/api/Makefile +++ b/services/api/Makefile @@ -10,2 +9,0 @@ include ../../tools/Python.mk -include ../../tools/PythonAudit.mk -#include ../../tools/PythonTest.mk diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 814cead1..4d2956c6 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -28 +28 @@ name = "asgiref" -version = "3.5.2" +version = "3.6.0" @@ -39 +39 @@ name = "attrs" -version = "22.1.0" +version = "22.2.0" @@ -43 +43 @@ optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" @@ -46,4 +46,5 @@ python-versions = ">=3.5" -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] @@ -72 +73 @@ name = "black" -version = "22.10.0" +version = "22.12.0" @@ -119 +120 @@ name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" @@ -123,4 +124 @@ optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] +python-versions = "*" @@ -160 +158 @@ name = "coverage" -version = "6.5.0" +version = "7.0.5" @@ -171 +169 @@ name = "cyclonedx-python-lib" -version = "3.1.1" +version = "3.1.5" @@ -215 +213 @@ name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" @@ -226 +224 @@ name = "filelock" -version = "3.8.1" +version = "3.9.0" @@ -233,2 +231,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -262 +260 @@ name = "gitpython" -version = "3.1.29" +version = "3.1.30" @@ -334,2 +332,2 @@ name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" @@ -338 +336 @@ optional = false -python-versions = "*" +python-versions = ">=3.7" @@ -342 +340 @@ name = "isort" -version = "5.10.1" +version = "5.11.4" @@ -346 
+344 @@ optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" @@ -372 +370 @@ name = "libcommon" -version = "0.6.0" +version = "0.6.4" @@ -390 +388 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" @@ -486 +484 @@ name = "orjson" -version = "3.8.3" +version = "3.8.5" @@ -506 +504 @@ name = "packaging" -version = "21.3" +version = "23.0" @@ -510,4 +508 @@ optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" @@ -517 +512 @@ name = "pathspec" -version = "0.10.2" +version = "0.10.3" @@ -525 +520 @@ name = "pbr" -version = "5.11.0" +version = "5.11.1" @@ -552 +547 @@ name = "pip-audit" -version = "2.4.7" +version = "2.4.13" @@ -562 +557 @@ html5lib = ">=1.1" -packaging = ">=21.0.0" +packaging = ">=23.0.0" @@ -564 +559 @@ pip-api = ">=0.0.28" -pip-requirements-parser = ">=31.2.0" +pip-requirements-parser = ">=32.0.0" @@ -571 +566 @@ dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] +lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "pdoc3", "ruff (<0.0.218)", "types-html5lib", "types-requests", "types-toml"] @@ -576 +571 @@ name = "pip-requirements-parser" -version = "31.2.0" +version = "32.0.1" @@ -580 +575 @@ optional = false -python-versions = ">=3.6.*" +python-versions = ">=3.6.0" @@ -583,0 +579 @@ packaging = "*" +pyparsing = "*" @@ -587 +583 @@ docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] +testing = ["aboutcode-toolkit (>=6.0.0)", "black", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] @@ -591 +587 @@ name = "platformdirs" -version = "2.5.4" +version = "2.6.2" @@ -598,2 +594,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -664 +660 @@ name = "pygments" -version = "2.13.0" +version = "2.14.0" @@ -698 +694 @@ description = "pyparsing module - Classes and methods to define and execute pars -category = "main" +category = "dev" @@ -707 +703 @@ name = "pyrsistent" -version = "0.19.2" +version = "0.19.3" @@ -715 +711 @@ name = "pytest" -version = "7.2.0" +version = "7.2.1" @@ -781 +777 @@ name = "requests" -version = "2.28.1" +version = "2.28.2" @@ -789 +785 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" @@ -813 +809 @@ name = "rich" -version = "12.6.0" +version = "13.1.0" @@ -817 +813 @@ optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" @@ -828 +824 @@ name = "setuptools" -version = "65.6.3" +version = "66.0.0" @@ -835 +831 @@ python-versions = ">=3.7" -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", 
"jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -967 +963 @@ name = "urllib3" -version = "1.26.13" +version = "1.26.14" @@ -996 +992 @@ name = "watchdog" -version = "2.1.9" +version = "2.2.1" @@ -1033 +1029 @@ python-versions = "3.9.15" -content-hash = "7a0e0c8fb2bb502ec2951099fa3eb74fc3b5e3534e63b60390ad29784cb9aec7" +content-hash = "e0c8cefca08354a6e349d28076b13d5185cc9333350e3df742db512293063e8d" @@ -1045,2 +1041,2 @@ asgiref = [ - {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, - {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, + {file = "asgiref-3.6.0-py3-none-any.whl", hash = "sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac"}, + {file = "asgiref-3.6.0.tar.gz", hash = "sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506"}, @@ -1049,2 +1045,2 @@ attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, @@ -1057,21 +1053,12 @@ black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = 
"black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, @@ -1088,2 +1075,88 @@ charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = 
"charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = 
"charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = 
"charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, @@ -1104,50 +1177,51 @@ coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = 
"coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809"}, + {file = "coverage-7.0.5-cp310-cp310-win32.whl", hash = "sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21"}, + {file = "coverage-7.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095"}, + {file = "coverage-7.0.5-cp311-cp311-win32.whl", hash = "sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831"}, + {file = "coverage-7.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea"}, + {file = "coverage-7.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47"}, + {file = "coverage-7.0.5-cp37-cp37m-win32.whl", hash = "sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882"}, + {file = "coverage-7.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499"}, + {file = "coverage-7.0.5-cp38-cp38-win32.whl", hash = "sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16"}, + {file = "coverage-7.0.5-cp38-cp38-win_amd64.whl", 
hash = "sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1"}, + {file = "coverage-7.0.5-cp39-cp39-win32.whl", hash = "sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904"}, + {file = "coverage-7.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f"}, + {file = "coverage-7.0.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0"}, + {file = "coverage-7.0.5.tar.gz", hash = "sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45"}, @@ -1156,2 +1230,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, - {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, + {file = "cyclonedx_python_lib-3.1.5-py3-none-any.whl", hash = "sha256:8981ca462fba91469c268d684a03f72c89c7a807674d884f83a28d8c2822a9b6"}, + {file = "cyclonedx_python_lib-3.1.5.tar.gz", hash = "sha256:1ccd482024a30b95c4fffb3fe567a9df97b705f34c1075f8abde8537867600c3"}, @@ -1168,2 +1242,2 @@ exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, @@ -1172,2 +1246,2 @@ filelock = [ - {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, - {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = 
"sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, @@ -1184,2 +1258,2 @@ gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, @@ -1204,2 +1278,2 @@ iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1208,2 +1282,2 @@ isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, @@ -1216 +1290 @@ libcommon = [ - {file = "libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, + {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, @@ -1363,44 +1437,44 @@ orjson = [ - {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, - {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, - {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, - {file = 
"orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, - {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, - {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, - {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, - {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, - {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:143639b9898b094883481fac37733231da1c2ae3aec78a1dd8d3b58c9c9fceef"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:31f43e63e0d94784c55e86bd376df3f80b574bea8c0bc5ecd8041009fa8ec78a"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c802ea6d4a0d40f096aceb5e7ef0a26c23d276cb9334e1cadcf256bb090b6426"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf298b55b371c2772420c5ace4d47b0a3ea1253667e20ded3c363160fd0575f6"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68cb4a8501a463771d55bb22fc72795ec7e21d71ab083e000a2c3b651b6fb2af"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4f1427952b3bd92bfb63a61b7ffc33a9f54ec6de296fa8d924cbeba089866acb"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0a9f329468c8eb000742455b83546849bcd69495d6baa6e171c7ee8600a47bd"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6535d527aa1e4a757a6ce9b61f3dd74edc762e7d2c6991643aae7c560c8440bd"}, + {file = "orjson-3.8.5-cp310-none-win_amd64.whl", hash = "sha256:2eee64c028adf6378dd714c8debc96d5b92b6bb4862debb65ca868e59bac6c63"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f5745ff473dd5c6718bf8c8d5bc183f638b4f3e03c7163ffcda4d4ef453f42ff"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:544f1240b295083697027a5093ec66763218ff16f03521d5020e7a436d2e417b"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c85c9c6bab97a831e7741089057347d99901b4db2451a076ca8adedc7d96297f"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bae7347764e7be6dada980fd071e865544c98317ab61af575c9cc5e1dc7e3fe"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f6f6e9d26a06b63126112a7bc8d8529df048d31df2a257a8484b76adf3e5d"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:758238364142fcbeca34c968beefc0875ffa10aa2f797c82f51cfb1d22d0934e"}, + {file = "orjson-3.8.5-cp311-none-win_amd64.whl", hash = "sha256:cc7579240fb88a626956a6cb4a181a11b62afbc409ce239a7b866568a2412fa2"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:79aa3e47cbbd4eedbbde4f988f766d6cf38ccb51d52cfabfeb6b8d1b58654d25"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:2544cd0d089faa862f5a39f508ee667419e3f9e11f119a6b1505cfce0eb26601"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2be0025ca7e460bcacb250aba8ce0239be62957d58cf34045834cc9302611d3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b57bf72902d818506906e49c677a791f90dbd7f0997d60b14bc6c1ce4ce4cf9"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ae9832a11c6a9efa8c14224e5caf6e35046efd781de14e59eb69ab4e561cf3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:0e28330cc6d51741cad0edd1b57caf6c5531aff30afe41402acde0a03246b8ed"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:155954d725627b5480e6cc1ca488afb4fa685099a4ace5f5bf21a182fabf6706"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ece1b6ef9312df5d5274ca6786e613b7da7de816356e36bcad9ea8a73d15ab71"}, + {file = "orjson-3.8.5-cp37-none-win_amd64.whl", hash = "sha256:6f58d1f0702332496bc1e2d267c7326c851991b62cf6395370d59c47f9890007"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:933f4ab98362f46a59a6d0535986e1f0cae2f6b42435e24a55922b4bc872af0c"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:47a7ca236b25a138a74b2cb5169adcdc5b2b8abdf661de438ba65967a2cde9dc"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b573ca942c626fcf8a86be4f180b86b2498b18ae180f37b4180c2aced5808710"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9bab11611d5452efe4ae5315f5eb806f66104c08a089fb84c648d2e8e00f106"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee2f5f6476617d01ca166266d70fd5605d3397a41f067022ce04a2e1ced4c8d"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:ec0b0b6cd0b84f03537f22b719aca705b876c54ab5cf3471d551c9644127284f"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:df3287dc304c8c4556dc85c4ab89eb333307759c1863f95e72e555c0cfce3e01"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09f40add3c2d208e20f8bf185df38f992bf5092202d2d30eced8f6959963f1d5"}, + {file = "orjson-3.8.5-cp38-none-win_amd64.whl", hash = "sha256:232ec1df0d708f74e0dd1fccac1e9a7008cd120d48fe695e8f0c9d80771da430"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = 
"sha256:8fba3e7aede3e88a01e94e6fe63d4580162b212e6da27ae85af50a1787e41416"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:85e22c358cab170c8604e9edfffcc45dd7b0027ce57ed6bcacb556e8bfbbb704"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeab1d8247507a75926adf3ca995c74e91f5db1f168815bf3e774f992ba52b50"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daaaef15a41e9e8cadc7677cefe00065ae10bce914eefe8da1cd26b3d063970b"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ccc9f52cf46bd353c6ae1153eaf9d18257ddc110d135198b0cd8718474685ce"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d48c182c7ff4ea0787806de8a2f9298ca44fd0068ecd5f23a4b2d8e03c745cb6"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1848e3b4cc09cc82a67262ae56e2a772b0548bb5a6f9dcaee10dcaaf0a5177b7"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38480031bc8add58effe802291e4abf7042ef72ae1a4302efe9a36c8f8bfbfcc"}, + {file = "orjson-3.8.5-cp39-none-win_amd64.whl", hash = "sha256:0e9a1c2e649cbaed410c882cedc8f3b993d8f1426d9327f31762d3f46fe7cc88"}, + {file = "orjson-3.8.5.tar.gz", hash = "sha256:77a3b2bd0c4ef7723ea09081e3329dac568a62463aed127c1501441b07ffc64b"}, @@ -1413,2 +1487,2 @@ packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, @@ -1417,2 +1491,2 @@ pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, + {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, + {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, @@ -1421,2 +1495,2 @@ pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, @@ -1433,2 +1507,2 @@ pip-audit = [ - {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, - {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, + {file = "pip_audit-2.4.13-py3-none-any.whl", hash = "sha256:3ea2fc5c70bf335362d4d81a7bd1084787efac34929e422f79bd8cf8804da2e2"}, + {file = "pip_audit-2.4.13.tar.gz", hash = "sha256:e0c9fe070a16aefdbb9c4d43df6a0183bc951375a293f58264c5e80b5edb57d7"}, @@ 
-1437,2 +1511,2 @@ pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, + {file = "pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3"}, + {file = "pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526"}, @@ -1441,2 +1515,2 @@ platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, @@ -1481,2 +1555,2 @@ pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, @@ -1600,22 +1674,27 @@ pyrsistent = [ - {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win32.whl", hash = "sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win32.whl", hash = "sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73"}, - {file = "pyrsistent-0.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308"}, - {file = 
"pyrsistent-0.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win32.whl", hash = "sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab"}, - {file = "pyrsistent-0.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win32.whl", hash = "sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291"}, - {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, - {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, @@ -1624,2 +1703,2 @@ pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, + {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, @@ -1682,2 +1761,2 @@ requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, @@ -1690,2 +1769,2 @@ rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = 
"sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.1.0-py3-none-any.whl", hash = "sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"}, + {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"}, @@ -1694,2 +1773,2 @@ setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"}, + {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"}, @@ -1778,2 +1857,2 @@ urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, @@ -1786,25 +1865,28 @@ watchdog = [ - {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a735a990a1095f75ca4f36ea2ef2752c99e6ee997c46b0de507ba40a09bf7330"}, - {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b17d302850c8d412784d9246cfe8d7e3af6bcd45f958abb2d08a6f8bedf695d"}, - {file = "watchdog-2.1.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee3e38a6cc050a8830089f79cbec8a3878ec2fe5160cdb2dc8ccb6def8552658"}, - {file = "watchdog-2.1.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64a27aed691408a6abd83394b38503e8176f69031ca25d64131d8d640a307591"}, - {file = "watchdog-2.1.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:195fc70c6e41237362ba720e9aaf394f8178bfc7fa68207f112d108edef1af33"}, - {file = "watchdog-2.1.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bfc4d351e6348d6ec51df007432e6fe80adb53fd41183716017026af03427846"}, - {file = "watchdog-2.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8250546a98388cbc00c3ee3cc5cf96799b5a595270dfcfa855491a64b86ef8c3"}, - {file = "watchdog-2.1.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:117ffc6ec261639a0209a3252546b12800670d4bf5f84fbd355957a0595fe654"}, - {file = "watchdog-2.1.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:97f9752208f5154e9e7b76acc8c4f5a58801b338de2af14e7e181ee3b28a5d39"}, - {file = "watchdog-2.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:247dcf1df956daa24828bfea5a138d0e7a7c98b1a47cf1fa5b0c3c16241fcbb7"}, - {file = "watchdog-2.1.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:226b3c6c468ce72051a4c15a4cc2ef317c32590d82ba0b330403cafd98a62cfd"}, - {file = "watchdog-2.1.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d9820fe47c20c13e3c9dd544d3706a2a26c02b2b43c993b62fcd8011bcc0adb3"}, - {file = "watchdog-2.1.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:70af927aa1613ded6a68089a9262a009fbdf819f46d09c1a908d4b36e1ba2b2d"}, - {file = "watchdog-2.1.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:ed80a1628cee19f5cfc6bb74e173f1b4189eb532e705e2a13e3250312a62e0c9"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9f05a5f7c12452f6a27203f76779ae3f46fa30f1dd833037ea8cbc2887c60213"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_armv7l.whl", hash = "sha256:255bb5758f7e89b1a13c05a5bceccec2219f8995a3a4c4d6968fe1de6a3b2892"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_i686.whl", hash = "sha256:d3dda00aca282b26194bdd0adec21e4c21e916956d972369359ba63ade616153"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_ppc64.whl", hash = "sha256:186f6c55abc5e03872ae14c2f294a153ec7292f807af99f57611acc8caa75306"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:083171652584e1b8829581f965b9b7723ca5f9a2cd7e20271edf264cfd7c1412"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_s390x.whl", hash = "sha256:b530ae007a5f5d50b7fbba96634c7ee21abec70dc3e7f0233339c81943848dc1"}, - {file = "watchdog-2.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:4f4e1c4aa54fb86316a62a87b3378c025e228178d55481d30d857c6c438897d6"}, - {file = "watchdog-2.1.9-py3-none-win32.whl", hash = "sha256:5952135968519e2447a01875a6f5fc8c03190b24d14ee52b0f4b1682259520b1"}, - {file = "watchdog-2.1.9-py3-none-win_amd64.whl", hash = "sha256:7a833211f49143c3d336729b0020ffd1274078e94b0ae42e22f596999f50279c"}, - {file = "watchdog-2.1.9-py3-none-win_ia64.whl", hash = "sha256:ad576a565260d8f99d97f2e64b0f97a48228317095908568a9d5c786c829d428"}, - {file = "watchdog-2.1.9.tar.gz", hash = "sha256:43ce20ebb36a51f21fa376f76d1d4692452b2527ccd601950d69ed36b9e21609"}, + {file = "watchdog-2.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a09483249d25cbdb4c268e020cb861c51baab2d1affd9a6affc68ffe6a231260"}, + {file = "watchdog-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5100eae58133355d3ca6c1083a33b81355c4f452afa474c2633bd2fbbba398b3"}, + {file = "watchdog-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e618a4863726bc7a3c64f95c218437f3349fb9d909eb9ea3a1ed3b567417c661"}, + {file = "watchdog-2.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:102a60093090fc3ff76c983367b19849b7cc24ec414a43c0333680106e62aae1"}, + {file = "watchdog-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:748ca797ff59962e83cc8e4b233f87113f3cf247c23e6be58b8a2885c7337aa3"}, + {file = "watchdog-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ccd8d84b9490a82b51b230740468116b8205822ea5fdc700a553d92661253a3"}, + {file = "watchdog-2.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6e01d699cd260d59b84da6bda019dce0a3353e3fcc774408ae767fe88ee096b7"}, + {file = "watchdog-2.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8586d98c494690482c963ffb24c49bf9c8c2fe0589cec4dc2f753b78d1ec301d"}, + {file = "watchdog-2.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:adaf2ece15f3afa33a6b45f76b333a7da9256e1360003032524d61bdb4c422ae"}, + {file = "watchdog-2.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83a7cead445008e880dbde833cb9e5cc7b9a0958edb697a96b936621975f15b9"}, + {file = "watchdog-2.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8ac23ff2c2df4471a61af6490f847633024e5aa120567e08d07af5718c9d092"}, + {file = "watchdog-2.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d0f29fd9f3f149a5277929de33b4f121a04cf84bb494634707cfa8ea8ae106a8"}, + {file = "watchdog-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:967636031fa4c4955f0f3f22da3c5c418aa65d50908d31b73b3b3ffd66d60640"}, + {file = 
"watchdog-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96cbeb494e6cbe3ae6aacc430e678ce4b4dd3ae5125035f72b6eb4e5e9eb4f4e"}, + {file = "watchdog-2.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61fdb8e9c57baf625e27e1420e7ca17f7d2023929cd0065eb79c83da1dfbeacd"}, + {file = "watchdog-2.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb5ecc332112017fbdb19ede78d92e29a8165c46b68a0b8ccbd0a154f196d5e"}, + {file = "watchdog-2.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a480d122740debf0afac4ddd583c6c0bb519c24f817b42ed6f850e2f6f9d64a8"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:978a1aed55de0b807913b7482d09943b23a2d634040b112bdf31811a422f6344"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:8c28c23972ec9c524967895ccb1954bc6f6d4a557d36e681a36e84368660c4ce"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_i686.whl", hash = "sha256:c27d8c1535fd4474e40a4b5e01f4ba6720bac58e6751c667895cbc5c8a7af33c"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d6b87477752bd86ac5392ecb9eeed92b416898c30bd40c7e2dd03c3146105646"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:cece1aa596027ff56369f0b50a9de209920e1df9ac6d02c7f9e5d8162eb4f02b"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:8b5cde14e5c72b2df5d074774bdff69e9b55da77e102a91f36ef26ca35f9819c"}, + {file = "watchdog-2.2.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e038be858425c4f621900b8ff1a3a1330d9edcfeaa1c0468aeb7e330fb87693e"}, + {file = "watchdog-2.2.1-py3-none-win32.whl", hash = "sha256:bc43c1b24d2f86b6e1cc15f68635a959388219426109233e606517ff7d0a5a73"}, + {file = "watchdog-2.2.1-py3-none-win_amd64.whl", hash = "sha256:17f1708f7410af92ddf591e94ae71a27a13974559e72f7e9fde3ec174b26ba2e"}, + {file = "watchdog-2.2.1-py3-none-win_ia64.whl", hash = "sha256:195ab1d9d611a4c1e5311cbf42273bc541e18ea8c32712f2fb703cfc6ff006f9"}, + {file = "watchdog-2.2.1.tar.gz", hash = "sha256:cdcc23c9528601a8a293eb4369cbd14f6b4f34f07ae8769421252e9c22718b6f"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 4c73d5e3..d793a29e 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } diff --git a/tools/Python.mk b/tools/Python.mk index 885dbcab..4ae26589 100644 --- a/tools/Python.mk +++ b/tools/Python.mk @@ -31,0 +32,5 @@ style: + +.PHONY: pip-audit +pip-audit: + bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.2 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^hffs @/d')" +# ^ we remove problematic lines to have a working pip-audit. See https://github.com/pypa/pip-audit/issues/84#issuecomment-1326203111 for "requests" diff --git a/tools/PythonAudit.mk b/tools/PythonAudit.mk deleted file mode 100644 index e4276d6d..00000000 --- a/tools/PythonAudit.mk +++ /dev/null @@ -1,4 +0,0 @@ -.PHONY: pip-audit -pip-audit: - bash -c "poetry run pip-audit --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" -# ^ we remove problematic lines to have a working pip-audit. 
See https://github.com/pypa/pip-audit/issues/84#issuecomment-1326203111 for "requests" diff --git a/tools/PythonTest.mk b/tools/PythonTest.mk index b4c8e48d..7ec20501 100644 --- a/tools/PythonTest.mk +++ b/tools/PythonTest.mk @@ -0,0 +1,2 @@ +TEST_PATH ?= tests + @@ -4 +6 @@ test: - poetry run python -m pytest -vv -x ${ADDOPTS} tests + poetry run python -m pytest -vv -x ${ADDOPTS} $(TEST_PATH) @@ -10 +12 @@ debug: - poetry run python -m pytest -vv -x --log-cli-level=DEBUG --capture=tee-sys --pdb ${ADDOPTS} tests + poetry run python -m pytest -vv -x --log-cli-level=DEBUG --capture=tee-sys --pdb ${ADDOPTS} $(TEST_PATH) @@ -16 +18 @@ coverage: - poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests + poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term $(TEST_PATH) diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index d25532c4..2e8b5065 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -115 +115 @@ services: - worker-parquet: + worker-parquet-and-dataset-info: @@ -118 +118 @@ services: - # dockerfile: workers/parquet/Dockerfile + # dockerfile: workers/datasets_based/Dockerfile @@ -123,0 +124,21 @@ services: + extends: + file: docker-compose-base.yml + service: datasets-worker + environment: + DATASETS_BASED_ENDPOINT: "/parquet-and-dataset-info" # hard-coded + PARQUET_AND_DATASET_INFO_BLOCKED_DATASETS: ${PARQUET_AND_DATASET_INFO_BLOCKED_DATASETS-} + PARQUET_AND_DATASET_INFO_COMMIT_MESSAGE: ${PARQUET_AND_DATASET_INFO_COMMIT_MESSAGE-Update parquet files} + PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN: ${PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN-} + PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE: ${PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE-100_000_000} + PARQUET_AND_DATASET_INFO_SOURCE_REVISION: ${PARQUET_AND_DATASET_INFO_SOURCE_REVISION-main} + PARQUET_AND_DATASET_INFO_SUPPORTED_DATASETS: ${PARQUET_AND_DATASET_INFO_SUPPORTED_DATASETS-} + PARQUET_AND_DATASET_INFO_TARGET_REVISION: ${PARQUET_AND_DATASET_INFO_TARGET_REVISION-refs/convert/parquet} + PARQUET_AND_DATASET_INFO_URL_TEMPLATE: ${PARQUET_AND_DATASET_INFO_URL_TEMPLATE-/datasets/%s/resolve/%s/%s} + depends_on: + - mongodb + restart: always + worker-parquet: + # build: + # context: .. + # dockerfile: workers/datasets_based/Dockerfile + image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided} @@ -129,8 +150,13 @@ services: - PARQUET_BLOCKED_DATASETS: ${PARQUET_BLOCKED_DATASETS-} - PARQUET_COMMIT_MESSAGE: ${PARQUET_COMMIT_MESSAGE-Update parquet files} - PARQUET_COMMITTER_HF_TOKEN: ${PARQUET_COMMITTER_HF_TOKEN-} - PARQUET_MAX_DATASET_SIZE: ${PARQUET_MAX_DATASET_SIZE-100_000_000} - PARQUET_SOURCE_REVISION: ${PARQUET_SOURCE_REVISION-main} - PARQUET_SUPPORTED_DATASETS: ${PARQUET_SUPPORTED_DATASETS-} - PARQUET_TARGET_REVISION: ${PARQUET_TARGET_REVISION-refs/convert/parquet} - PARQUET_URL_TEMPLATE: ${PARQUET_URL_TEMPLATE-/datasets/%s/resolve/%s/%s} + depends_on: + - mongodb + restart: always + worker-dataset-info: + # build: + # context: ..
+ # dockerfile: workers/datasets_based/Dockerfile + image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided} + extends: + file: docker-compose-base.yml + service: datasets-worker + environment: + DATASETS_BASED_ENDPOINT: "/dataset-info" # hard-coded diff --git a/workers/datasets_based/Makefile b/workers/datasets_based/Makefile index c5dda24f..8c09fd2f 100644 --- a/workers/datasets_based/Makefile +++ b/workers/datasets_based/Makefile @@ -16,6 +15,0 @@ run: - -.PHONY: pip-audit -pip-audit: - bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" -# ^ we remove problematic lines to have a working pip-audit. See https://github.com/pypa/pip-audit/issues/84#issuecomment-1326203111 for "requests" -# ^ we also ignore GHSA-47fc-vmwq-366v vulnerability because it has no fix for the moment diff --git a/workers/datasets_based/README.md b/workers/datasets_based/README.md index 893722fc..15d54069 100644 --- a/workers/datasets_based/README.md +++ b/workers/datasets_based/README.md @@ -3 +3 @@ -> Worker that pre-computes and caches the response to /splits, /first-rows or /parquet. +> Worker that pre-computes and caches the response to /splits, /first-rows or /parquet-and-dataset-info. @@ -13 +13 @@ Set environment variables to configure the datasets-based worker (`DATASETS_BASE -- `DATASETS_BASED_ENDPOINT`: the endpoint on which the worker will work (pre-compute and cache the response). The same worker is used for different endpoints to reuse shared code and dependencies. But at runtime, the worker is assigned only one endpoint. Allowed values: `/splits`, `/first_rows`, and ` /parquet`. Defaults to `/splits`. +- `DATASETS_BASED_ENDPOINT`: the endpoint on which the worker will work (pre-compute and cache the response). The same worker is used for different endpoints to reuse shared code and dependencies. But at runtime, the worker is assigned only one endpoint. Allowed values: `/splits`, `/first_rows`, and `/parquet-and-dataset-info`. Defaults to `/splits`. @@ -50 +50 @@ Also, set the assets-related configuration for the first-rows worker. See [../.. -### Parquet worker +### Parquet and dataset info worker @@ -52 +52 @@ Also, set the assets-related configuration for the first-rows worker. See [../.. -Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/parquet`. +Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/parquet-and-dataset-info`. @@ -54 +54 @@ Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/parquet`. -Set environment variables to configure the parquet worker (`PARQUET_` prefix): +Set environment variables to configure the parquet and dataset info worker (`PARQUET_AND_DATASET_INFO_` prefix): @@ -56,8 +56,8 @@ Set environment variables to configure the parquet worker (`PARQUET_` prefix): -- `PARQUET_BLOCKED_DATASETS`: comma-separated list of the blocked datasets. If empty, no dataset is blocked. Defaults to empty. -- `PARQUET_COMMIT_MESSAGE`: the git commit message when the worker uploads the parquet files to the Hub. Defaults to `Update parquet files`. -- `PARQUET_COMMITTER_HF_TOKEN`: the user token (https://huggingface.co/settings/tokens) to commit the parquet files to the Hub.
The user must be allowed to create the `refs/convert/parquet` branch (see `PARQUET_TARGET_REVISION`) ([Hugging Face organization](https://huggingface.co/huggingface) members have this right). It must also have the right to push to the `refs/convert/parquet` branch ([Datasets maintainers](https://huggingface.co/datasets-maintainers) members have this right). It must have permission to write. If not set, the worker will fail. Defaults to None. -- `PARQUET_MAX_DATASET_SIZE`: the maximum size in bytes of the dataset to pre-compute the parquet files. Bigger datasets, or datasets without that information, are ignored. Defaults to `100_000_000`. -- `PARQUET_SOURCE_REVISION`: the git revision of the dataset to use to prepare the parquet files. Defaults to `main`. -- `PARQUET_SUPPORTED_DATASETS`: comma-separated list of the supported datasets. The worker does not test the size of supported datasets against the maximum dataset size. Defaults to empty. -- `PARQUET_TARGET_REVISION`: the git revision of the dataset where to store the parquet files. Make sure the committer token (`PARQUET_COMMITTER_HF_TOKEN`) has the permission to write there. Defaults to `refs/convert/parquet`. -- `PARQUET_URL_TEMPLATE`: the URL template to build the parquet file URLs. Defaults to `/datasets/%s/resolve/%s/%s`. +- `PARQUET_AND_DATASET_INFO_BLOCKED_DATASETS`: comma-separated list of the blocked datasets. If empty, no dataset is blocked. Defaults to empty. +- `PARQUET_AND_DATASET_INFO_COMMIT_MESSAGE`: the git commit message when the worker uploads the parquet files to the Hub. Defaults to `Update parquet files`. +- `PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN`: the user token (https://huggingface.co/settings/tokens) to commit the parquet files to the Hub. The user must be allowed to create the `refs/convert/parquet` branch (see `PARQUET_AND_DATASET_INFO_TARGET_REVISION`) ([Hugging Face organization](https://huggingface.co/huggingface) members have this right). It must also have the right to push to the `refs/convert/parquet` branch ([Datasets maintainers](https://huggingface.co/datasets-maintainers) members have this right). It must have permission to write. If not set, the worker will fail. Defaults to None. +- `PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE`: the maximum size in bytes of the dataset to pre-compute the parquet files. Bigger datasets, or datasets without that information, are ignored. Defaults to `100_000_000`. +- `PARQUET_AND_DATASET_INFO_SOURCE_REVISION`: the git revision of the dataset to use to prepare the parquet files. Defaults to `main`. +- `PARQUET_AND_DATASET_INFO_SUPPORTED_DATASETS`: comma-separated list of the supported datasets. The worker does not test the size of supported datasets against the maximum dataset size. Defaults to empty. +- `PARQUET_AND_DATASET_INFO_TARGET_REVISION`: the git revision of the dataset where to store the parquet files. Make sure the committer token (`PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN`) has the permission to write there. Defaults to `refs/convert/parquet`. +- `PARQUET_AND_DATASET_INFO_URL_TEMPLATE`: the URL template to build the parquet file URLs. Defaults to `/datasets/%s/resolve/%s/%s`. 
diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index d517cf1d..07d27b95 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -3 +3 @@ name = "absl-py" -version = "1.3.0" +version = "1.4.0" @@ -108 +108 @@ name = "attrs" -version = "22.1.0" +version = "22.2.0" @@ -112 +112 @@ optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" @@ -115,4 +115,5 @@ python-versions = ">=3.5" -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] @@ -164 +165 @@ name = "black" -version = "22.10.0" +version = "22.12.0" @@ -233 +234 @@ name = "cachetools" -version = "5.2.0" +version = "5.2.1" @@ -325 +326 @@ name = "coverage" -version = "6.5.0" +version = "7.0.5" @@ -336 +337 @@ name = "crc32c" -version = "2.3" +version = "2.3.post0" @@ -352 +353 @@ name = "cyclonedx-python-lib" -version = "3.1.1" +version = "3.1.5" @@ -472 +473 @@ name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" @@ -497 +498 @@ name = "filelock" -version = "3.8.1" +version = "3.9.0" @@ -504,2 +505,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -522 +523 @@ name = "flatbuffers" -version = "22.11.23" +version = "23.1.4" @@ -581 +582 @@ name = "gdown" -version = "4.5.4" +version = "4.6.0" @@ -607 +608 @@ name = "gitpython" -version = "3.1.29" +version = "3.1.30" @@ -618 +619 @@ name = "google-auth" -version = "2.15.0" +version = "2.16.0" @@ -634,0 +636 @@ reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0dev)"] @@ -768 +770 @@ name = "importlib-metadata" -version = "5.1.0" +version = "6.0.0" @@ -778 +780 @@ zipp = ">=0.5" -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] @@ -797,2 +799,2 @@ name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple 
config-ini parsing" @@ -801 +803 @@ optional = false -python-versions = "*" +python-versions = ">=3.7" @@ -805 +807 @@ name = "isort" -version = "5.10.1" +version = "5.11.4" @@ -809 +811 @@ optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" @@ -850 +852 @@ name = "keras" -version = "2.10.0" +version = "2.11.0" @@ -854,18 +856 @@ optional = false -python-versions = "*" - -[[package]] -name = "keras-preprocessing" -version = "1.1.2" -description = "Easy data preprocessing and data augmentation for deep learning models" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -numpy = ">=1.9.1" -six = ">=1.9.0" - -[package.extras] -image = ["Pillow (>=5.2.0)", "scipy (>=0.14)"] -pep8 = ["flake8"] -tests = ["Pillow", "keras", "pandas", "pytest", "pytest-cov", "pytest-xdist", "tensorflow"] +python-versions = ">=3.7" @@ -883 +868 @@ name = "libclang" -version = "14.0.6" +version = "15.0.6.1" @@ -891 +876 @@ name = "libcommon" -version = "0.6.1" +version = "0.6.4" @@ -909 +894 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl" @@ -968 +953 @@ name = "lxml" -version = "4.9.1" +version = "4.9.2" @@ -1056 +1041 @@ name = "multidict" -version = "6.0.3" +version = "6.0.4" @@ -1140 +1125 @@ name = "nltk" -version = "3.7" +version = "3.8.1" @@ -1180,0 +1166,48 @@ python-versions = ">=3.8" +[[package]] +name = "nvidia-cublas-cu11" +version = "11.10.3.66" +description = "CUBLAS native runtime libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-nvrtc-cu11" +version = "11.7.99" +description = "NVRTC native runtime libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-runtime-cu11" +version = "11.7.99" +description = "CUDA Runtime native Libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cudnn-cu11" +version = "8.5.0.96" +description = "cuDNN runtime libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + @@ -1222 +1255 @@ name = "orjson" -version = "3.8.3" +version = "3.8.5" @@ -1242 +1275 @@ name = "packaging" -version = "21.3" +version = "23.0" @@ -1246,4 +1279 @@ optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" @@ -1269 +1299 @@ name = "pandas-stubs" -version = "1.5.2.221124" +version = "1.5.2.230105" @@ -1280 +1310 @@ name = "pathspec" -version = "0.10.2" +version = "0.10.3" @@ -1288 +1318 @@ name = "pbr" -version = "5.11.0" +version = "5.11.1" @@ -1296 +1326 @@ name = "pillow" -version = "9.3.0" +version = "9.4.0" @@ -1303 +1333 @@ python-versions = ">=3.7" -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] @@ -1327 +1357 @@ name = "pip-audit" -version = "2.4.7" +version = "2.4.13" @@ -1337 +1367 @@ html5lib = ">=1.1" -packaging = ">=21.0.0" +packaging = ">=23.0.0" @@ -1339 +1369 @@ pip-api = ">=0.0.28" 
-pip-requirements-parser = ">=31.2.0" +pip-requirements-parser = ">=32.0.0" @@ -1346 +1376 @@ dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] +lint = ["black (>=22.3.0)", "interrogate", "isort", "mypy", "pdoc3", "ruff (<0.0.218)", "types-html5lib", "types-requests", "types-toml"] @@ -1351 +1381 @@ name = "pip-requirements-parser" -version = "31.2.0" +version = "32.0.1" @@ -1355 +1385 @@ optional = false -python-versions = ">=3.6.*" +python-versions = ">=3.6.0" @@ -1358,0 +1389 @@ packaging = "*" +pyparsing = "*" @@ -1362 +1393 @@ docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] +testing = ["aboutcode-toolkit (>=6.0.0)", "black", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] @@ -1366 +1397 @@ name = "platformdirs" -version = "2.5.4" +version = "2.6.2" @@ -1373,2 +1404,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -1419 +1450 @@ name = "proto-plus" -version = "1.22.1" +version = "1.22.2" @@ -1572 +1603 @@ name = "pygments" -version = "2.13.0" +version = "2.14.0" @@ -1645 +1676 @@ name = "pytest" -version = "7.2.0" +version = "7.2.1" @@ -1703 +1734 @@ name = "pytz" -version = "2022.6" +version = "2022.7.1" @@ -1743 +1774 @@ name = "requests" -version = "2.28.1" +version = "2.28.2" @@ -1751 +1782 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" @@ -1823 +1854 @@ name = "rich" -version = "12.6.0" +version = "13.1.0" @@ -1827 +1858 @@ optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" @@ -1849 +1880 @@ name = "scikit-learn" -version = "1.1.3" +version = "1.2.0" @@ -1856 +1887 @@ python-versions = ">=3.8" -joblib = ">=1.0.0" +joblib = ">=1.1.1" @@ -1862,4 +1893,4 @@ threadpoolctl = ">=2.0.0" -benchmark = ["matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] -examples = ["matplotlib (>=3.1.2)", "pandas (>=1.0.5)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"] -tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.2)", "mypy (>=0.961)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pyamg (>=4.0.0)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.16.2)"] +benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", 
"seaborn (>=0.9.0)"] +tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy (>=0.961)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=5.3.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.16.2)"] @@ -1869 +1900 @@ name = "scipy" -version = "1.9.3" +version = "1.10.0" @@ -1873 +1904 @@ optional = false -python-versions = ">=3.8" +python-versions = "<3.12,>=3.8" @@ -1876 +1907 @@ python-versions = ">=3.8" -numpy = ">=1.18.5,<1.26.0" +numpy = ">=1.19.5,<1.27.0" @@ -1879,3 +1910,3 @@ numpy = ">=1.18.5,<1.26.0" -dev = ["flake8", "mypy", "pycodestyle", "typing_extensions"] -doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-panels (>=0.5.2)", "sphinx-tabs"] -test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"] +doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] @@ -1885 +1916 @@ name = "setuptools" -version = "65.6.3" +version = "66.0.0" @@ -1892 +1923 @@ python-versions = ">=3.7" -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -1955 +1986 @@ name = "tensorboard" -version = "2.10.1" +version = "2.11.2" @@ -1959 +1990 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -1968 +1999 @@ numpy = ">=1.12.0" -protobuf = ">=3.9.2,<3.20" +protobuf = ">=3.9.2,<4" @@ -1994 +2025 @@ name = "tensorflow" -version = "2.10.1" +version = "2.11.0" @@ -2008,2 +2039 @@ h5py = ">=2.9.0" -keras = ">=2.10.0,<2.11" -keras-preprocessing = ">=1.1.1" +keras = ">=2.11.0,<2.12" @@ -2017,3 +2047,3 @@ six = ">=1.12.0" -tensorboard = ">=2.10,<2.11" -tensorflow-estimator = ">=2.10.0,<2.11" -tensorflow-io-gcs-filesystem = ">=0.23.1" +tensorboard = ">=2.11,<2.12" +tensorflow-estimator = ">=2.11.0,<2.12" +tensorflow-io-gcs-filesystem = {version = ">=0.23.1", markers = "platform_machine != \"arm64\" or platform_system != \"Darwin\""} @@ -2026 +2056 @@ name = "tensorflow-estimator" -version = "2.10.0" +version = "2.11.0" @@ -2034 +2064 @@ name = "tensorflow-io-gcs-filesystem" -version = "0.28.0" +version = "0.29.0" @@ -2038 +2068 @@ optional = false -python-versions = ">=3.7, <3.11" +python-versions = ">=3.7, <3.12" @@ -2049 +2079 @@ name = "tensorflow-macos" -version = "2.10.0" +version = "2.11.0" @@ -2063,2 +2093 @@ h5py = ">=2.9.0" -keras = ">=2.10.0,<2.11" -keras-preprocessing = ">=1.1.1" +keras = ">=2.11.0,<2.12" @@ -2072,2 +2101,3 @@ six = ">=1.12.0" -tensorboard = ">=2.10,<2.11" -tensorflow-estimator = ">=2.10.0,<2.11" +tensorboard = ">=2.11,<2.12" +tensorflow-estimator = ">=2.11.0,<2.12" +tensorflow-io-gcs-filesystem = {version = ">=0.23.1", markers = "platform_machine 
!= \"arm64\" or platform_system != \"Darwin\""} @@ -2080 +2110 @@ name = "termcolor" -version = "2.1.1" +version = "2.2.0" @@ -2157 +2187 @@ name = "torch" -version = "1.10.2" +version = "1.13.1" @@ -2161 +2191 @@ optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" @@ -2163,0 +2194,4 @@ python-versions = ">=3.6.2" +nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\""} +nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""} +nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""} +nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\""} @@ -2165,0 +2200,3 @@ typing-extensions = "*" +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] + @@ -2168 +2205 @@ name = "torchaudio" -version = "0.10.2" +version = "0.13.1" @@ -2175 +2212 @@ python-versions = "*" -torch = "1.10.2" +torch = "*" @@ -2308 +2345 @@ name = "types-pytz" -version = "2022.6.0.1" +version = "2022.7.1.0" @@ -2316 +2353 @@ name = "types-requests" -version = "2.28.11.5" +version = "2.28.11.7" @@ -2343 +2380 @@ name = "ujson" -version = "5.6.0" +version = "5.7.0" @@ -2351 +2388 @@ name = "urllib3" -version = "1.26.13" +version = "1.26.14" @@ -2413 +2450 @@ name = "xxhash" -version = "3.1.0" +version = "3.2.0" @@ -2460 +2497 @@ python-versions = "3.9.15" -content-hash = "b596bc1e1812eeab2eade45c68f8f0d985404600bb94cb2b14012c8f9f0ae84e" +content-hash = "d83bcdcbaefeb20ef577109e64bb0f7a0975628350c85357b57a494fb26eceff" @@ -2464,2 +2501,2 @@ absl-py = [ - {file = "absl-py-1.3.0.tar.gz", hash = "sha256:463c38a08d2e4cef6c498b76ba5bd4858e4c6ef51da1a5a1f27139a022e20248"}, - {file = "absl_py-1.3.0-py3-none-any.whl", hash = "sha256:34995df9bd7a09b3b8749e230408f5a2a2dd7a68a0d33c12a3d0cb15a041a507"}, + {file = "absl-py-1.4.0.tar.gz", hash = "sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d"}, + {file = "absl_py-1.4.0-py3-none-any.whl", hash = "sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47"}, @@ -2600,2 +2637,2 @@ attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, @@ -2615,21 +2652,12 @@ black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - 
{file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, @@ -2761,2 +2789,2 @@ cachetools = [ - {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, - {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, + {file = "cachetools-5.2.1-py3-none-any.whl", hash = "sha256:8462eebf3a6c15d25430a8c27c56ac61340b2ecf60c9ce57afc2b97e450e47da"}, + {file = "cachetools-5.2.1.tar.gz", hash = "sha256:5991bc0e08a1319bb618d3195ca5b6bc76646a49c21d55962977197b301cc1fe"}, @@ -2862,50 +2890,51 @@ coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", 
hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = 
"coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809"}, + {file = "coverage-7.0.5-cp310-cp310-win32.whl", hash = "sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21"}, + {file = "coverage-7.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca"}, + {file = 
"coverage-7.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095"}, + {file = "coverage-7.0.5-cp311-cp311-win32.whl", hash = "sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831"}, + {file = "coverage-7.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea"}, + {file = "coverage-7.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47"}, + {file = "coverage-7.0.5-cp37-cp37m-win32.whl", hash = "sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882"}, + {file = "coverage-7.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a"}, + {file = 
"coverage-7.0.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499"}, + {file = "coverage-7.0.5-cp38-cp38-win32.whl", hash = "sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16"}, + {file = "coverage-7.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1"}, + {file = "coverage-7.0.5-cp39-cp39-win32.whl", hash = "sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904"}, + {file = "coverage-7.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f"}, + {file = "coverage-7.0.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0"}, + {file = "coverage-7.0.5.tar.gz", hash = "sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45"}, @@ -2914,67 +2943,78 @@ crc32c = [ - {file = "crc32c-2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82942ed343e5c884b5c0c9aa6bb5bb47de0247df95ce5d154cc48744d5c2ffd4"}, - {file = "crc32c-2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f641a9bd24a309637cca6c119b8aabdfe6d41bab5ea630124ee9be7891e36ba1"}, - {file = "crc32c-2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:374d288cc1735932276bc65670db329dd9fe2af4ec323599dc40e1212b13985e"}, - {file = "crc32c-2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b7c71a3ae1511c42b7919e6116560c08ba89479ea249f281c5bfba2b619411d"}, - {file = "crc32c-2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f524fd202472d041b9bddb4a51b5fff28767a9c69953dbcdeecc67ef65707c07"}, - 
{file = "crc32c-2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9a070dbe10dac29c2f591a59300c37448e3c7a747b6ea18d4826b7c94a956bd"}, - {file = "crc32c-2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ab9df0bd9bf10f3d5bd346321d48da8a28392b1f48f7a6fa3234acebe6ee448"}, - {file = "crc32c-2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8948a9262d36e2aad3be74aac3ce7a1b090ab2361f7619b3f23418fa536f1b25"}, - {file = "crc32c-2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:865bf66d86809971d4856e38085a4a15a7251b8e780f22ad52e12b50784dac25"}, - {file = "crc32c-2.3-cp310-cp310-win32.whl", hash = "sha256:e14f4d57e004fa5a6100ea3aeb9574bee6f95965a96a382154fa40aee1fdeb5e"}, - {file = "crc32c-2.3-cp310-cp310-win_amd64.whl", hash = "sha256:ca03d8d5b35a26e0d3eb8c7121de3e37a59042735029eabcf1c4b15343f82cdd"}, - {file = "crc32c-2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5612be1606eec55511ade38deec40c9f1c7647ec0407a4031e0a2e6e6a635f27"}, - {file = "crc32c-2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab21f02c13dc5a0411838d0709cb4d24bcb865ea28b683b7403826c08d14e27"}, - {file = "crc32c-2.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c1f3e28b8aec8a0f7727337fafa31f0ace38e59e054c51fecb923535c6dc6e6"}, - {file = "crc32c-2.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed14214fcc1416e0dc63be4c88aad7f58e0f0cb2c22d578b861e8fc19d1b2d2f"}, - {file = "crc32c-2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1d334d51d395f78fb649e8442341da782e63d3f9552fcfbc040995d24d4b794d"}, - {file = "crc32c-2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5ddf91756d6275f497d0895b8875d1f1fdac6be08a5900f4123ede2c91cd1422"}, - {file = "crc32c-2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5aa6383c0a13a542c3f1eb82a02e29c1141e0a2bc63faedd0062d1c41649989f"}, - {file = "crc32c-2.3-cp36-cp36m-win32.whl", hash = "sha256:ef1165f7f36edaae03fcf03f1ca3bdbf196a5255d656bfb17959ba0405a2c8ee"}, - {file = "crc32c-2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f1679f7f700f2aec3dbee4e357a2fdde53e2ec151dde4e0b52a9205fac273a90"}, - {file = "crc32c-2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c04a27ba3cbc7a9e34c77f402bd3a83442a2c7acd3897d2539b1a3321ed28a6a"}, - {file = "crc32c-2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51ac079c44297bbf624a598cffe6f85bd0a5faf780fd75d2d5e531d42d427ef"}, - {file = "crc32c-2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb1fea3d9ec71f353a6c38648d074e722fff1f43c1998ae6088dbee324a1ca6"}, - {file = "crc32c-2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b917b73d810bcdbcd1461978ba55038dcf2bbc3b56704b0082d2f9b0d5edc7ad"}, - {file = "crc32c-2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0369e637d13db5c06e45a34b069ff2ba292ac881e8a44a8658ccf3edaa9c392f"}, - {file = "crc32c-2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:47088e524a9ec2887ae0ec519d75df40f005debf9d52f10e688f27e7cc0d339c"}, - {file = "crc32c-2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fddf16ed92dcb8ee34a12bd0757d5719d3c750a9dc813d82972477885b114339"}, - {file = "crc32c-2.3-cp37-cp37m-win32.whl", hash = 
"sha256:3f372a53e9cf2464421b82b41fb66d98f654284c8fc4363f51bb0f5485fdc2b4"}, - {file = "crc32c-2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4d223e844ee61ac492f0197b62ccc2a9c23db15e4d2938e698fec6eded0daf15"}, - {file = "crc32c-2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4323f56908b7e5cea039122aad039fcf750974b09e4f993244d4dddb24cab561"}, - {file = "crc32c-2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fac1b4248625acd65985378f6b34a00b73cfc9db5b8ccc73101744de2e3dfa66"}, - {file = "crc32c-2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9ce72a40c17636af97e37bad2f2c11a2e740f57d4051ef586c04d1aa83db8b38"}, - {file = "crc32c-2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9bc7e5599f5970fff1f9aa551639336a76d1bb1fb00f0b87704049df8ba035"}, - {file = "crc32c-2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:682974e2cfb199ebc4adc5eb4d493dbcf83812a031a8ecccae5a7b5bcade5d9f"}, - {file = "crc32c-2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:255e35719c252ce7609cb3f1c5a045783a6e0d6d7b035d507ddd82d5194c236a"}, - {file = "crc32c-2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:df19ab6ab3884a237388c7720b1fe617dd4893305f62383d0f96fc7980dfdf7c"}, - {file = "crc32c-2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:61479a60d5a2b3160a4ae17b37df119963a741fd61ca71d4792670cdf7d7ea41"}, - {file = "crc32c-2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e6e16d57b8103fee9fdecb38e908d9ceb70d2196bb932dba64bf7b570f44c0b9"}, - {file = "crc32c-2.3-cp38-cp38-win32.whl", hash = "sha256:ad83e4c78379cc3e22b760e9874bc57f91a9cfb85107ccba1c6442bc1a2e2a1c"}, - {file = "crc32c-2.3-cp38-cp38-win_amd64.whl", hash = "sha256:32c573dd861933e2390932cc10e1b78d71ee7827ee4dfcec96e23cf007a1a6d3"}, - {file = "crc32c-2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ad57917650af59c989b62184fc4604d6c5066fc030ced4c6e07a596000f1ab86"}, - {file = "crc32c-2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5e076ae46ac0e4e28eb43932c5c0b8e1b8751bb7d1b0d239f18230aed7cca3bf"}, - {file = "crc32c-2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:896bda76db13f229c1126d5e384673f78e06685e70d76fff4c5a3f65b4068b4d"}, - {file = "crc32c-2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bc2a9ccfa7c02bb8a5346fd546b65ed265965e7fea768c7f2681f2b68d6a0"}, - {file = "crc32c-2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6872d8728f30f2a13f95762801428cf92a7ee6f170c872be81a17b1549b69131"}, - {file = "crc32c-2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:327e44184826cd1c72bcd4a9b2c4badfd29501333e158460c7d3ad8b7f066588"}, - {file = "crc32c-2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:866d1cbe646bdef67fc225371da265f081809bcf238bf562d6874c97e7fcb0d6"}, - {file = "crc32c-2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c59c6ea67ab927b2ab958c7b01a6b17c9cad882e7a1da51b9c35fbc9874ff46a"}, - {file = "crc32c-2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27116037f97a02f1a123ca82008ee993c28afe8590e047a6cd86aca33653cca"}, - {file = "crc32c-2.3-cp39-cp39-win32.whl", hash = "sha256:90c46644225dc7f71b4dd499ed71ada59d061fd60aa55233270d088ee8cfcd13"}, - {file = "crc32c-2.3-cp39-cp39-win_amd64.whl", hash = "sha256:a2427a9196c2b8b1c27d7e31cc5c9fff13af0b1411ff1565459f65554990f055"}, - 
{file = "crc32c-2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5a13d41a29d3feea5ba87def9d4dccc3362139345a24997de33fad00b656622b"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8363b553b33719b37fff46378a6e96106fd9232d2e043eebb6c6da46925c7663"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ec3d9257d0624fb74335f67592b6a30de5e0cfb60322ed8682e35820decac8f"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d82fa5bb0661a7a508e62730d4d9045f53d4ab6a9211b560a014f1d58a8337cb"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5f347244590f294eaea2e92546100bd56db926305e0603a0d57a88e59f86b308"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dce1deda03c6dbe0f5ae6e3e0f8671caead64075fd19a61b1700d42a88af97c8"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7d568eb07473d9bc6fb413a4d3248265212c537b80d494ab884cc5316589110"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5560faa3f673183eb1e2fc2c1361cc9ab86865a1d5774baf61fec9ca6c1a696"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8067ce072908626869b583700da6b4bfc9a538975d77232ae68a31d8af5f1ff6"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:250af144edce7850a35c618b4dd1bf56436e031560228c17a7c78bf29239ceb0"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4ac8738e9cd28948e40fb3a3c89a44660e4ad266f7726964200224e101f5c8ef"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c74d81a00972cbe65e27e99838b44ed5e04bced971e5bfa01c27a4bd17138442"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a423c098ceffbd70544d1de3e00eeb45ec4b8463ab5d8005389fbbf3243314d1"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04c44ad7cde9c21ad426bdfa675ba7039db82a6961c99690f9d2ff2f034c892"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cea0fe7053e36a4809e5bf95989552f52c98bbc94dca9062fb5b8c976daa0f32"}, - {file = "crc32c-2.3.tar.gz", hash = "sha256:17ce6c596ad0d53df52dcd72defb66984aeabd98fbefea7ba848a6b6bdece36a"}, + {file = "crc32c-2.3.post0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e311f52e24b633e8d588ab9a0e7992bfcfe8284a1655202bdac5aee80254a3fd"}, + {file = "crc32c-2.3.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4459462c732232ffb29b58decd246ed5cdb8c16ae141f57f03cb2e3445dc1d2e"}, + {file = "crc32c-2.3.post0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1285d33cdeda2d2248994d41706f88f0fe58265ae907d23221c07028e79f9670"}, + {file = "crc32c-2.3.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c24ebb561e10a5eada2128a7357e41969155cebe7b34656176fc24412d45c8b"}, + {file = "crc32c-2.3.post0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb4222a766f59b1cd8cbe56af5dbdfd3a2c0ec40b60c9ee6efe4a5cabc94112d"}, + {file 
= "crc32c-2.3.post0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:217c1b64be777cf235556066c363f4dec22b29a956a174f6361037b1b2065c63"}, + {file = "crc32c-2.3.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bb11668f75a7f4f699b9a125aaf15259687f691beb95e756e3bea80d7163645"}, + {file = "crc32c-2.3.post0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04220e1db5567dc234d1e9dc182c5b8241905057ec19967ac3a917bcaf06d70e"}, + {file = "crc32c-2.3.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dbae415e9ec7dfdcdeac981cf4833d9942ce9de175b2be5a21c641c3a88e609b"}, + {file = "crc32c-2.3.post0-cp310-cp310-win32.whl", hash = "sha256:7fb366626bf7ef66e55656c8385fcc94f22f8d3847a7a84c810d2e3f63f54c62"}, + {file = "crc32c-2.3.post0-cp310-cp310-win_amd64.whl", hash = "sha256:01787094f281ae7c8f645d7b3c309a02bac45cb385206eee651aa27d933a87e5"}, + {file = "crc32c-2.3.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a3ff6b893ab482f0841a2b7e394adb749b1a896c854ce92f72c60e2ea3a3553"}, + {file = "crc32c-2.3.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:585ab3307a2aa73b935f0b0358197f0af5306204d646ac321ecf01f2a3725f94"}, + {file = "crc32c-2.3.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acb0d4a1cf19fdc2946ab9b1dc5d4f1347e97b356a863fbba2d8a3d3c1cbe815"}, + {file = "crc32c-2.3.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75e9a588e7241d09de9023dc51174cc2c9ac7c453ae0e26a5718e266b48ae392"}, + {file = "crc32c-2.3.post0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5be4ad72c198c4a22515ef2ad728f9829ee3d75e6c7f3e41030c8266e46c0c7c"}, + {file = "crc32c-2.3.post0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08fdba1351d5cbb428d9ba3ce5c03d43687e7b23c6bc0cf99973306e5549dab9"}, + {file = "crc32c-2.3.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:21919937ecac802e436c1a9978e8b27522ca87bf67dc5ce3a5b5622c0b5c3a06"}, + {file = "crc32c-2.3.post0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c102e8988618e3bd15f4297ab95631c6d9e59326f9af17bf7d71c2ad4639a7f7"}, + {file = "crc32c-2.3.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6072cc60102a8ac86f45ab8d29c8679a8ac0445477eed3d0af6d0becceea1392"}, + {file = "crc32c-2.3.post0-cp311-cp311-win32.whl", hash = "sha256:295053584dc3a11d8f02d6ccc6dd3698331e252cd816d7652b0723c516ef3c41"}, + {file = "crc32c-2.3.post0-cp311-cp311-win_amd64.whl", hash = "sha256:05ad2f6b6392b2a0af159142e6ec029cddd15d67a76b7762b3316cbb5cc8e22c"}, + {file = "crc32c-2.3.post0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c002f55429a12ec87a0b33a073b384d26edd46d89b3cf7cfb6ddf5abb6e2bfab"}, + {file = "crc32c-2.3.post0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec216adbbf7ad1506918c8199a144d26740650b594f79755f5f1affec7e7820b"}, + {file = "crc32c-2.3.post0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38f4c8eaa77fa9bbe690b58546dd3f2e244c13d5d0a01fa93076d3817a22bc68"}, + {file = "crc32c-2.3.post0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:247ada85bd0a54012e910ff46697871f80bcff9018b59f7de23161726a146b2a"}, + {file = "crc32c-2.3.post0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:363b8f2993f07eb8ac665c7227cb2a569fb1f4eb1551a05695bc2f94c23307e4"}, + {file = "crc32c-2.3.post0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8af10d0c3752db01dfa77c6c4c8fa070bdefe939eb9ab94b4dec1dcc2cc11fa5"}, + {file = "crc32c-2.3.post0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b60dd506177d2ea68fb548caa9cc383f46c024947d1990604c11aa615ec9da62"}, + {file = "crc32c-2.3.post0-cp36-cp36m-win32.whl", hash = "sha256:305ca4eb8c399081a68ca0274ae176753be8430fc874d1e7397a2cbc95748733"}, + {file = "crc32c-2.3.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:5c9e58f96a0e56e60ea683504f605b76c73b7f885837bfc8bd8346b054133045"}, + {file = "crc32c-2.3.post0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21ef9ebdfe4f2e45c94327a1a9a222a899be784d78674065ada6e8e240d3a4a8"}, + {file = "crc32c-2.3.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85b3395c476916b098a20cfb8686558865ca3ef71caaf9e6b0a548b2049ee87d"}, + {file = "crc32c-2.3.post0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42c0363d68a95d133af02803772395b42bc202840ea70a317e2b46beb9e53af0"}, + {file = "crc32c-2.3.post0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eb7cc4200745cbc717bb0b83b538e6582be980d4f8c9f9bb0740a23e93a4d8"}, + {file = "crc32c-2.3.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d7bcc07f3a63cc8be7536ca35a4c5b96763b8e0ce5d48f30d9374ec7e381b057"}, + {file = "crc32c-2.3.post0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:89bcf1158c577635bdc61b27d29deb2fe0c1191a54a490f976563a73abe3b2e3"}, + {file = "crc32c-2.3.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d18e82f66a0c25c8fb10b21e71cf2f2aa81441fd1a062809249e98a338ef9c81"}, + {file = "crc32c-2.3.post0-cp37-cp37m-win32.whl", hash = "sha256:31f59b051a7fa4a2ada3f76a79014be38c45e9d3c906eca381e9007677ffe506"}, + {file = "crc32c-2.3.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:19d6a505582194ed0b2bb257cf3729c922e7e92f457a9f7f5493cf821cb19afc"}, + {file = "crc32c-2.3.post0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cce0c79dbf4d4f2276cb9e32f4d0dfc3ce5d8cc5c3f0a0bc62612505cd779c67"}, + {file = "crc32c-2.3.post0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:86837a00716056c29f9d84c980cdac050ba3c7610c9edca1b2ac01192715725d"}, + {file = "crc32c-2.3.post0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a61e6754ee54bbae9253035eabb5658d0ada9162eb1b98feabefb044b95e6c0"}, + {file = "crc32c-2.3.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:470bb05c224ede904af0278d18ac26f1132a8cfc7a11cfa0406c26ef75765427"}, + {file = "crc32c-2.3.post0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de0307898c24a8ae29d4b94ddd3aa81b73d3b2b0e490d226e3a3dd05526dfca1"}, + {file = "crc32c-2.3.post0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0925a7a8fcc216744e86c21f1749c22f950f9bdad512cb5c80ee85017625bd5"}, + {file = "crc32c-2.3.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d9ea9e6eb2912051a40ac6877646925ee3a058c4aa3868988fe1d8c4577f57d4"}, + {file = "crc32c-2.3.post0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6bfc277c43fa1dd4c4a91a1b56347008e34c8214dd99b1424b5d636272f2922c"}, + {file = "crc32c-2.3.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:771ad4abacb89a14e1f5cbbb291ead652a0c9ed4be2c9b579c869957c0b03e3d"}, + {file = "crc32c-2.3.post0-cp38-cp38-win32.whl", hash = "sha256:a57d1ec8f1aa45c14a51770b011359b511eb7dcc6ffd7c8fc9e918e2aa009416"}, + {file = "crc32c-2.3.post0-cp38-cp38-win_amd64.whl", hash = "sha256:05b69167116680bd40116c8fac847950d1eb170fdd42a814602223b4e002b0bb"}, + {file = "crc32c-2.3.post0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cee4275dfb3cfc4a1e4c338089f3223fce878d7151cebb095937c07410371908"}, + {file = "crc32c-2.3.post0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:176b3c5ff7fa4e2f83c241ab9dc4fd1584d1c9333d4c7295c16c9f6097c29933"}, + {file = "crc32c-2.3.post0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68941ac55632f231120baeeba72690cdf2ec2531fea3ceab4612dbf855411b05"}, + {file = "crc32c-2.3.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00de0d157dc17bbf01fef615aff6095a78b3561aad37b4ba4a300b11311aae55"}, + {file = "crc32c-2.3.post0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:061b084e27d92dc3e1a9efd500e6e3feee9e97e8cefe2fbdcc0011cba7f3242a"}, + {file = "crc32c-2.3.post0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2f6105a430f4f1f393cc660bf8ac3a4860396fa9b5ac8bf0c7ba1de044a3cbc"}, + {file = "crc32c-2.3.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69cd27f493a6bcbeb1b59eea4a978734ebf3ce2b6f757a99405d6eebd38af551"}, + {file = "crc32c-2.3.post0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7d02e45cddebfa82694fc9fc7df2f42366431e90b0abd40c5c63758bc9234123"}, + {file = "crc32c-2.3.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0b5f5a18e9bfe98a273d9618d5c04470fe983dcebcd453fac07c398dfac7db10"}, + {file = "crc32c-2.3.post0-cp39-cp39-win32.whl", hash = "sha256:e88bbdeba430dfee6d83192a9e55c8e89884a3c5215d1b7643395ecbbd1b502b"}, + {file = "crc32c-2.3.post0-cp39-cp39-win_amd64.whl", hash = "sha256:c55e8a45e360aeb3cea2cf9d9fb3771a711ed3c3fce2d91c874d767aae4f5cee"}, + {file = "crc32c-2.3.post0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b3628ff77ca4cf3c3f0209d5eb824b79d8e324bbb1feaff3fb6bff8adc23ec08"}, + {file = "crc32c-2.3.post0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ef1b87ac755e20933bc8136a45ca9993a03c0b0ba16dd946ab287108305332b"}, + {file = "crc32c-2.3.post0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ebbcde06765fcde3d2d440153839a9ac675866fb25aa86219595c370e6d3f7c"}, + {file = "crc32c-2.3.post0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:335f1fcd5fb346be4ac4c982f89bb66b39c93a2c2d4bcdb3e3188d8adcb225b2"}, + {file = "crc32c-2.3.post0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b6a4df4a978dbf43e548a008dc4686f6e24d52defb8c03a79b67aebfeaa2caa6"}, + {file = "crc32c-2.3.post0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:129b0ac8ee712ce42aae36d7e6e5202ab080f06117f57ba2c894226586e80050"}, + {file = "crc32c-2.3.post0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ad74ec499a3d6981900c1e2873b1e6a19e2ee3c650a3e611c3076ad9167f3a"}, + {file = "crc32c-2.3.post0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69cb66a0c680ae531df7f32833a3d6df26aeeb144c0f7a8899d2d5bb7c9cdc2c"}, + {file = 
"crc32c-2.3.post0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b823aacfefe0001a08667d7a4d7dd87133537e3628ed581fa416829a5dad26fd"}, + {file = "crc32c-2.3.post0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a4d98142f6e40dec28994846a6acafd96ba822d81b3c6671633fb11d41692c32"}, + {file = "crc32c-2.3.post0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bce06be1d9aa7e4b3e3038fe80facafa3526bec9e484ec089c035b8018727c1b"}, + {file = "crc32c-2.3.post0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0467261c67377a92ad6665a9590b3820cfb12d59c3c6ccac6326200e032ddda4"}, + {file = "crc32c-2.3.post0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1101b4e680085ea7c01074d38378610392262bc56936ec17eed61f1372197193"}, + {file = "crc32c-2.3.post0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80ddf6b0594bb980a635ff4818c0c64927193c1a09e8b5b6986769e94a7ba9ee"}, + {file = "crc32c-2.3.post0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3e547c06a1dda463daf398661af6bda767debe0097630b48c463605e38ade31a"}, + {file = "crc32c-2.3.post0.tar.gz", hash = "sha256:7d4b39ca6791830c4f1c053d2d8983627af702f0445535ff53d3220f35cf6ce6"}, @@ -2986,2 +3026,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, - {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, + {file = "cyclonedx_python_lib-3.1.5-py3-none-any.whl", hash = "sha256:8981ca462fba91469c268d684a03f72c89c7a807674d884f83a28d8c2822a9b6"}, + {file = "cyclonedx_python_lib-3.1.5.tar.gz", hash = "sha256:1ccd482024a30b95c4fffb3fe567a9df97b705f34c1075f8abde8537867600c3"}, @@ -3016,2 +3056,2 @@ exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, @@ -3043,2 +3083,2 @@ filelock = [ - {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, - {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, @@ -3051,2 +3091,2 @@ flatbuffers = [ - {file = "flatbuffers-22.11.23-py2.py3-none-any.whl", hash = "sha256:13043a5deba77e55b73064750195d2c5b494754d52b7d4ad01bc52cad5c3c9f2"}, - {file = "flatbuffers-22.11.23.tar.gz", hash = "sha256:2a82b85eea7f6712ab41077086dae1a89382862fe64414c8ebdf976123d1a095"}, + {file = "flatbuffers-23.1.4-py2.py3-none-any.whl", hash = "sha256:8bf47bcaef0deac76ae95586032e867e1d6d8fd429d00ca8d3d01e43fd3d1f8f"}, + {file = "flatbuffers-23.1.4.tar.gz", hash = 
"sha256:04d2141ea38866600beda17ffebf739b23f4f500cc22606076cc83079155106d"}, @@ -3139,2 +3179,2 @@ gdown = [ - {file = "gdown-4.5.4-py3-none-any.whl", hash = "sha256:99b99d537eb6dee3cfffeafb73e5558c347ca2a97a59864c100e6c0bb5f42a95"}, - {file = "gdown-4.5.4.tar.gz", hash = "sha256:6aff67d1eb22fb3a5aed2b4563794aa3506c72df083f86b1ec493252709ca68f"}, + {file = "gdown-4.6.0-py3-none-any.whl", hash = "sha256:e75c5aa8be8ea1cac642d4793f884339d887ab5e07aaa57fafa16c8a56a0cde5"}, + {file = "gdown-4.6.0.tar.gz", hash = "sha256:5ce3db0aeda54f46caacb2df86f31c3e3ecd17c355689e6456d85fb528ba9749"}, @@ -3147,2 +3187,2 @@ gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, @@ -3151,2 +3191,2 @@ google-auth = [ - {file = "google-auth-2.15.0.tar.gz", hash = "sha256:72f12a6cfc968d754d7bdab369c5c5c16032106e52d32c6dfd8484e4c01a6d1f"}, - {file = "google_auth-2.15.0-py2.py3-none-any.whl", hash = "sha256:6897b93556d8d807ad70701bb89f000183aea366ca7ed94680828b37437a4994"}, + {file = "google-auth-2.16.0.tar.gz", hash = "sha256:ed7057a101af1146f0554a769930ac9de506aeca4fd5af6543ebe791851a9fbd"}, + {file = "google_auth-2.16.0-py2.py3-none-any.whl", hash = "sha256:5045648c821fb72384cdc0e82cc326df195f113a33049d9b62b74589243d2acc"}, @@ -3253,2 +3293,2 @@ importlib-metadata = [ - {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, - {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, @@ -3328,2 +3368,2 @@ iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -3332,2 +3372,2 @@ isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, @@ -3345,5 +3385 @@ keras = [ - {file = "keras-2.10.0-py2.py3-none-any.whl", hash = "sha256:26a6e2c2522e7468ddea22710a99b3290493768fc08a39e75d1173a0e3452fdf"}, -] -keras-preprocessing = [ - {file = "Keras_Preprocessing-1.1.2-py2.py3-none-any.whl", 
hash = "sha256:7b82029b130ff61cc99b55f3bd27427df4838576838c5b2f65940e4fcec99a7b"}, - {file = "Keras_Preprocessing-1.1.2.tar.gz", hash = "sha256:add82567c50c8bc648c14195bf544a5ce7c1f76761536956c3d2978970179ef3"}, + {file = "keras-2.11.0-py2.py3-none-any.whl", hash = "sha256:38c6fff0ea9a8b06a2717736565c92a73c8cd9b1c239e7125ccb188b7848f65e"}, @@ -3355,10 +3391,8 @@ libclang = [ - {file = "libclang-14.0.6-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:8791cf3c3b087c373a6d61e9199da7a541da922c9ddcfed1122090586b996d6e"}, - {file = "libclang-14.0.6-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b06fc76bd1e67c8b04b5719bf2ac5d6a323b289b245dfa9e468561d99538188"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:e429853939423f276a25140b0b702442d7da9a09e001c05e48df888336947614"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux2010_x86_64.whl", hash = "sha256:206d2789e4450a37d054e63b70451a6fc1873466397443fa13de2b3d4adb2796"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux2014_aarch64.whl", hash = "sha256:e2add1703129b2abe066fb1890afa880870a89fd6ab4ec5d2a7a8dc8d271677e"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux2014_armv7l.whl", hash = "sha256:5dd3c6fca1b007d308a4114afa8e4e9d32f32b2572520701d45fcc626ac5cd6c"}, - {file = "libclang-14.0.6-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cfb0e892ebb5dff6bd498ab5778adb8581f26a00fd8347b3c76c989fe2fd04f7"}, - {file = "libclang-14.0.6-py2.py3-none-win_amd64.whl", hash = "sha256:ea03c12675151837660cdd5dce65bd89320896ac3421efef43a36678f113ce95"}, - {file = "libclang-14.0.6-py2.py3-none-win_arm64.whl", hash = "sha256:2e4303e04517fcd11173cb2e51a7070eed71e16ef45d4e26a82c5e881cac3d27"}, - {file = "libclang-14.0.6.tar.gz", hash = "sha256:9052a8284d8846984f6fa826b1d7460a66d3b23a486d782633b42b6e3b418789"}, + {file = "libclang-15.0.6.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:8621795e07b87e17fc7aac9f071bc7fe6b52ed6110c0a96a9975d8113c8c2527"}, + {file = "libclang-15.0.6.1-py2.py3-none-manylinux2010_x86_64.whl", hash = "sha256:69b01a23ab543908a661532595daa23cf88bd96d80e41f58ba0eaa6a378fe0d8"}, + {file = "libclang-15.0.6.1-py2.py3-none-manylinux2014_aarch64.whl", hash = "sha256:4a5188184b937132c198ee9de9a8a2316d5fdd1a825398d5ad1a8f5e06f9b40e"}, + {file = "libclang-15.0.6.1-py2.py3-none-manylinux2014_armv7l.whl", hash = "sha256:f7ffa02ac5e586cfffde039dcccc439d88d0feac7d77bf9426d9ba7543d16545"}, + {file = "libclang-15.0.6.1-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:aaebb6aa1db73bac3a0ac41e57ef78743079eb68728adbf7e80ee917ae171529"}, + {file = "libclang-15.0.6.1-py2.py3-none-win_amd64.whl", hash = "sha256:85afb47630d2070e74b886040ceea1846097ca53cc88d0f1d7751d0f49220028"}, + {file = "libclang-15.0.6.1-py2.py3-none-win_arm64.whl", hash = "sha256:687d8549c110c700fece58dd87727421d0710fdd111aa7eecb01faf8e3f50d4e"}, + {file = "libclang-15.0.6.1.tar.gz", hash = "sha256:a1a8fe038af2962c787c5bac81bfa4b82bb8e279e61e70cc934c10f6e20c73ec"}, @@ -3367 +3401 @@ libcommon = [ - {file = "libcommon-0.6.1-py3-none-any.whl", hash = "sha256:e62070144ec77422c60b915c351f15c06a27ebe0fd30024ebac7f4f35d250454"}, + {file = "libcommon-0.6.4-py3-none-any.whl", hash = "sha256:523d724b1b2c676f8a387287def7c709432dc6b1671ea1d29dab2b58100e4d87"}, @@ -3412,70 +3446,77 @@ lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = 
"lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = 
"sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = 
"lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, @@ -3600,74 +3641,74 @@ multidict = [ - {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73009ea04205966d47e16d98686ac5c438af23a1bb30b48a2c5da3423ec9ce37"}, - {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b92a9f3ab904397a33b193000dc4de7318ea175c4c460a1e154c415f9008e3d"}, - {file = "multidict-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:578bfcb16f4b8675ef71b960c00f174b0426e0eeb796bab6737389d8288eb827"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1650ea41c408755da5eed52ac6ccbc8938ccc3e698d81e6f6a1be02ff2a0945"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d52442e7c951e4c9ee591d6047706e66923d248d83958bbf99b8b19515fffaef"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad7d66422b9cc51125509229693d27e18c08f2dea3ac9de408d821932b1b3759"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cd14e61f0da2a2cfb9fe05bfced2a1ed7063ce46a7a8cd473be4973de9a7f91"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:190626ced82d4cc567a09e7346340d380154a493bac6905e0095d8158cdf1e38"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:791458a1f7d1b4ab3bd9e93e0dcd1d59ef7ee9aa051dcd1ea030e62e49b923fd"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b46e79a9f4db53897d17bc64a39d1c7c2be3e3d4f8dba6d6730a2b13ddf0f986"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e4a095e18847c12ec20e55326ab8782d9c2d599400a3a2f174fab4796875d0e2"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3541882266247c7cd3dba78d6ef28dbe704774df60c9e4231edaa4493522e614"}, - {file = "multidict-6.0.3-cp310-cp310-win32.whl", hash = "sha256:67090b17a0a5be5704fd109f231ee73cefb1b3802d41288d6378b5df46ae89ba"}, - {file = "multidict-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:36df958b15639e40472adaa4f0c2c7828fe680f894a6b48c4ce229f59a6a798b"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b51969503709415a35754954c2763f536a70b8bf7360322b2edb0c0a44391f6"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24e8d513bfcaadc1f8b0ebece3ff50961951c54b07d5a775008a882966102418"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d325d61cac602976a5d47b19eaa7d04e3daf4efce2164c630219885087234102"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbbe17f8a7211b623502d2bf41022a51da3025142401417c765bf9a56fed4c"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3fe591956d8841882c463f934c9f7485cfd5f763a08c0d467b513dc18ef89"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1925f78a543b94c3d46274c66a366fee8a263747060220ed0188e5f3eeea1c0"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e1ce0b187c4e93112304dcde2aa18922fdbe8fb4f13d8aa72a5657bce0563a"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e07c24018986fb00d6e7eafca8fcd6e05095649e17fcf0e33a592caaa62a78b9"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:114a4ab3e5cfbc56c4b6697686ecb92376c7e8c56893ef20547921552f8bdf57"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ccf55f28066b4f08666764a957c2b7c241c7547b0921d69c7ceab5f74fe1a45"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:9d359b0a962e052b713647ac1f13eabf2263167b149ed1e27d5c579f5c8c7d2c"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df7b4cee3ff31b3335aba602f8d70dbc641e5b7164b1e9565570c9d3c536a438"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ee9b1cae9a6c5d023e5a150f6f6b9dbb3c3bbc7887d6ee07d4c0ecb49a473734"}, - {file = "multidict-6.0.3-cp311-cp311-win32.whl", hash = "sha256:960ce1b790952916e682093788696ef7e33ac6a97482f9b983abdc293091b531"}, - {file = "multidict-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:2b66d61966b12e6bba500e5cbb2c721a35e119c30ee02495c5629bd0e91eea30"}, - {file = "multidict-6.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:526f8397fc124674b8f39748680a0ff673bd6a715fecb4866716d36e380f015f"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d5129a937af4e3c4a1d6c139f4051b7d17d43276cefdd8d442a7031f7eef2"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38d394814b39be1c36ac709006d39d50d72a884f9551acd9c8cc1ffae3fc8c4e"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99341ca1f1db9e7f47914cb2461305665a662383765ced6f843712564766956d"}, - {file = 
"multidict-6.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5790cc603456b6dcf8a9a4765f666895a6afddc88b3d3ba7b53dea2b6e23116"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce8e51774eb03844588d3c279adb94efcd0edeccd2f97516623292445bcc01f9"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:baa96a3418e27d723064854143b2f414a422c84cc87285a71558722049bebc5a"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cb4a08f0aaaa869f189ffea0e17b86ad0237b51116d494da15ef7991ee6ad2d7"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:62db44727d0befea68e8ad2881bb87a9cfb6b87d45dd78609009627167f37b69"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:4cc5c8cd205a9810d16a5cd428cd81bac554ad1477cb87f4ad722b10992e794d"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000"}, - {file = "multidict-6.0.3-cp37-cp37m-win32.whl", hash = "sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253"}, - {file = "multidict-6.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c2a1168e5aa7c72499fb03c850e0f03f624fa4a5c8d2e215c518d0a73872eb64"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eddf604a3de2ace3d9a4e4d491be7562a1ac095a0a1c95a9ec5781ef0273ef11"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d09daf5c6ce7fc6ed444c9339bbde5ea84e2534d1ca1cd37b60f365c77f00dea"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:12e0d396faa6dc55ff5379eee54d1df3b508243ff15bfc8295a6ec7a4483a335"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70740c2bc9ab1c99f7cdcb104f27d16c63860c56d51c5bf0ef82fc1d892a2131"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e322c94596054352f5a02771eec71563c018b15699b961aba14d6dd943367022"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4159fc1ec9ede8ab93382e0d6ba9b1b3d23c72da39a834db7a116986605c7ab4"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47defc0218682281a52fb1f6346ebb8b68b17538163a89ea24dfe4da37a8a9a3"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f9511e48bde6b995825e8d35e434fc96296cf07a25f4aae24ff9162be7eaa46"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bce9f7c30e7e3a9e683f670314c0144e8d34be6b7019e40604763bd278d84f"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8230a39bae6c2e8a09e4da6bace5064693b00590a4a213e38f9a9366da10e7dd"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:445c0851a1cbc1f2ec3b40bc22f9c4a235edb3c9a0906122a9df6ea8d51f886c"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9aac6881454a750554ed4b280a839dcf9e2133a9d12ab4d417d673fb102289b7"}, - {file = "multidict-6.0.3-cp38-cp38-win32.whl", hash = "sha256:81c3d597591b0940e04949e4e4f79359b2d2e542a686ba0da5e25de33fec13e0"}, - 
{file = "multidict-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:dc4cfef5d899f5f1a15f3d2ac49f71107a01a5a2745b4dd53fa0cede1419385a"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d408172519049e36fb6d29672f060dc8461fc7174eba9883c7026041ef9bfb38"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e068dfeadbce63072b2d8096486713d04db4946aad0a0f849bd4fc300799d0d3"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8b817d4ed68fd568ec5e45dd75ddf30cc72a47a6b41b74d5bb211374c296f5e"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf5d19e12eff855aa198259c0b02fd3f5d07e1291fbd20279c37b3b0e6c9852"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5a811aab1b4aea0b4be669363c19847a8c547510f0e18fb632956369fdbdf67"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cfda34b7cb99eacada2072e0f69c0ad3285cb6f8e480b11f2b6d6c1c6f92718"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beeca903e4270b4afcd114f371a9602240dc143f9e944edfea00f8d4ad56c40d"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd5771e8ea325f85cbb361ddbdeb9ae424a68e5dfb6eea786afdcd22e68a7d5d"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9dbab2a7e9c073bc9538824a01f5ed689194db7f55f2b8102766873e906a6c1a"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f2c0957b3e8c66c10d27272709a5299ab3670a0f187c9428f3b90d267119aedb"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:94cbe5535ef150546b8321aebea22862a3284da51e7b55f6f95b7d73e96d90ee"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0e798b072cf2aab9daceb43d97c9c527a0c7593e67a7846ad4cc6051de1e303"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a27b029caa3b555a4f3da54bc1e718eb55fcf1a11fda8bf0132147b476cf4c08"}, - {file = "multidict-6.0.3-cp39-cp39-win32.whl", hash = "sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce"}, - {file = "multidict-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5e58ec0375803526d395f6f7e730ecc45d06e15f68f7b9cdbf644a2918324e51"}, - {file = "multidict-6.0.3.tar.gz", hash = "sha256:2523a29006c034687eccd3ee70093a697129a3ffe8732535d3b2df6a4ecc279d"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = 
"multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = 
"sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, @@ -3717,2 +3758,2 @@ nltk = [ - {file 
= "nltk-3.7-py3-none-any.whl", hash = "sha256:ba3de02490308b248f9b94c8bc1ac0683e9aa2ec49ee78536d8667afb5e3eec8"}, - {file = "nltk-3.7.zip", hash = "sha256:d6507d6460cec76d70afea4242a226a7542f85c669177b9c7f562b7cf1b05502"}, + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, @@ -3773,0 +3815,17 @@ numpy = [ +nvidia-cublas-cu11 = [ + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = "sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"}, + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = "sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"}, +] +nvidia-cuda-nvrtc-cu11 = [ + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"}, +] +nvidia-cuda-runtime-cu11 = [ + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"}, + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"}, +] +nvidia-cudnn-cu11 = [ + {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"}, + {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, +] @@ -3787,44 +3845,44 @@ orjson = [ - {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, - {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, - {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, - {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, - {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = 
"sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, - {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, - {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, - {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, - {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, - {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, - {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, - {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, - {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, - {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, - {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, - {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, - {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, - {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, - {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, - {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, - {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:143639b9898b094883481fac37733231da1c2ae3aec78a1dd8d3b58c9c9fceef"}, + {file = "orjson-3.8.5-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:31f43e63e0d94784c55e86bd376df3f80b574bea8c0bc5ecd8041009fa8ec78a"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c802ea6d4a0d40f096aceb5e7ef0a26c23d276cb9334e1cadcf256bb090b6426"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf298b55b371c2772420c5ace4d47b0a3ea1253667e20ded3c363160fd0575f6"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68cb4a8501a463771d55bb22fc72795ec7e21d71ab083e000a2c3b651b6fb2af"}, + {file = "orjson-3.8.5-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4f1427952b3bd92bfb63a61b7ffc33a9f54ec6de296fa8d924cbeba089866acb"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0a9f329468c8eb000742455b83546849bcd69495d6baa6e171c7ee8600a47bd"}, + {file = "orjson-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6535d527aa1e4a757a6ce9b61f3dd74edc762e7d2c6991643aae7c560c8440bd"}, + {file = "orjson-3.8.5-cp310-none-win_amd64.whl", hash = "sha256:2eee64c028adf6378dd714c8debc96d5b92b6bb4862debb65ca868e59bac6c63"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f5745ff473dd5c6718bf8c8d5bc183f638b4f3e03c7163ffcda4d4ef453f42ff"}, + {file = "orjson-3.8.5-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = 
"sha256:544f1240b295083697027a5093ec66763218ff16f03521d5020e7a436d2e417b"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85c9c6bab97a831e7741089057347d99901b4db2451a076ca8adedc7d96297f"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bae7347764e7be6dada980fd071e865544c98317ab61af575c9cc5e1dc7e3fe"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f6f6e9d26a06b63126112a7bc8d8529df048d31df2a257a8484b76adf3e5d"}, + {file = "orjson-3.8.5-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:758238364142fcbeca34c968beefc0875ffa10aa2f797c82f51cfb1d22d0934e"}, + {file = "orjson-3.8.5-cp311-none-win_amd64.whl", hash = "sha256:cc7579240fb88a626956a6cb4a181a11b62afbc409ce239a7b866568a2412fa2"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:79aa3e47cbbd4eedbbde4f988f766d6cf38ccb51d52cfabfeb6b8d1b58654d25"}, + {file = "orjson-3.8.5-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:2544cd0d089faa862f5a39f508ee667419e3f9e11f119a6b1505cfce0eb26601"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2be0025ca7e460bcacb250aba8ce0239be62957d58cf34045834cc9302611d3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b57bf72902d818506906e49c677a791f90dbd7f0997d60b14bc6c1ce4ce4cf9"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ae9832a11c6a9efa8c14224e5caf6e35046efd781de14e59eb69ab4e561cf3"}, + {file = "orjson-3.8.5-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:0e28330cc6d51741cad0edd1b57caf6c5531aff30afe41402acde0a03246b8ed"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:155954d725627b5480e6cc1ca488afb4fa685099a4ace5f5bf21a182fabf6706"}, + {file = "orjson-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ece1b6ef9312df5d5274ca6786e613b7da7de816356e36bcad9ea8a73d15ab71"}, + {file = "orjson-3.8.5-cp37-none-win_amd64.whl", hash = "sha256:6f58d1f0702332496bc1e2d267c7326c851991b62cf6395370d59c47f9890007"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:933f4ab98362f46a59a6d0535986e1f0cae2f6b42435e24a55922b4bc872af0c"}, + {file = "orjson-3.8.5-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:47a7ca236b25a138a74b2cb5169adcdc5b2b8abdf661de438ba65967a2cde9dc"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b573ca942c626fcf8a86be4f180b86b2498b18ae180f37b4180c2aced5808710"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9bab11611d5452efe4ae5315f5eb806f66104c08a089fb84c648d2e8e00f106"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee2f5f6476617d01ca166266d70fd5605d3397a41f067022ce04a2e1ced4c8d"}, + {file = "orjson-3.8.5-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:ec0b0b6cd0b84f03537f22b719aca705b876c54ab5cf3471d551c9644127284f"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:df3287dc304c8c4556dc85c4ab89eb333307759c1863f95e72e555c0cfce3e01"}, + {file = "orjson-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09f40add3c2d208e20f8bf185df38f992bf5092202d2d30eced8f6959963f1d5"}, + {file = 
"orjson-3.8.5-cp38-none-win_amd64.whl", hash = "sha256:232ec1df0d708f74e0dd1fccac1e9a7008cd120d48fe695e8f0c9d80771da430"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:8fba3e7aede3e88a01e94e6fe63d4580162b212e6da27ae85af50a1787e41416"}, + {file = "orjson-3.8.5-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:85e22c358cab170c8604e9edfffcc45dd7b0027ce57ed6bcacb556e8bfbbb704"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeab1d8247507a75926adf3ca995c74e91f5db1f168815bf3e774f992ba52b50"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daaaef15a41e9e8cadc7677cefe00065ae10bce914eefe8da1cd26b3d063970b"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ccc9f52cf46bd353c6ae1153eaf9d18257ddc110d135198b0cd8718474685ce"}, + {file = "orjson-3.8.5-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d48c182c7ff4ea0787806de8a2f9298ca44fd0068ecd5f23a4b2d8e03c745cb6"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1848e3b4cc09cc82a67262ae56e2a772b0548bb5a6f9dcaee10dcaaf0a5177b7"}, + {file = "orjson-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38480031bc8add58effe802291e4abf7042ef72ae1a4302efe9a36c8f8bfbfcc"}, + {file = "orjson-3.8.5-cp39-none-win_amd64.whl", hash = "sha256:0e9a1c2e649cbaed410c882cedc8f3b993d8f1426d9327f31762d3f46fe7cc88"}, + {file = "orjson-3.8.5.tar.gz", hash = "sha256:77a3b2bd0c4ef7723ea09081e3329dac568a62463aed127c1501441b07ffc64b"}, @@ -3837,2 +3895,2 @@ packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, @@ -3870,2 +3928,2 @@ pandas-stubs = [ - {file = "pandas-stubs-1.5.2.221124.tar.gz", hash = "sha256:d6bab9f373ff3c309bf560065d230a38ce4dcd22368be393fad6eb353d102b7c"}, - {file = "pandas_stubs-1.5.2.221124-py3-none-any.whl", hash = "sha256:5a2c47a0cf8e12e113d760d5da9c48daa2b977b14a4c368b8bbff27dbfcfd2bb"}, + {file = "pandas_stubs-1.5.2.230105-py3-none-any.whl", hash = "sha256:b2874d26eabcaecb83aa8435d47e41f1b5f90ad04f3e1e70c3c8604027490225"}, + {file = "pandas_stubs-1.5.2.230105.tar.gz", hash = "sha256:c78e433aca82577ce4a9fefc0d78cd26828dd95f8b1612416babff1b6957a22a"}, @@ -3874,2 +3932,2 @@ pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, + {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, + {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, @@ -3878,2 +3936,2 @@ pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, + {file = 
"pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, @@ -3882,61 +3940,70 @@ pillow = [ - {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"}, - {file = "Pillow-9.3.0-1-cp37-cp37m-win_amd64.whl", hash = "sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de"}, - {file = "Pillow-9.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7"}, - {file = "Pillow-9.3.0-cp310-cp310-win32.whl", hash = "sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91"}, - {file = "Pillow-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c"}, - {file = "Pillow-9.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11"}, - {file = "Pillow-9.3.0-cp311-cp311-win32.whl", hash = "sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c"}, - {file = "Pillow-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef"}, - {file = 
"Pillow-9.3.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee"}, - {file = "Pillow-9.3.0-cp37-cp37m-win32.whl", hash = "sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29"}, - {file = "Pillow-9.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636"}, - {file = "Pillow-9.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32"}, - {file = "Pillow-9.3.0-cp38-cp38-win32.whl", hash = "sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0"}, - {file = "Pillow-9.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c"}, - {file = "Pillow-9.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448"}, - {file = "Pillow-9.3.0-cp39-cp39-win32.whl", hash = "sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48"}, - {file = "Pillow-9.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8"}, - {file = "Pillow-9.3.0.tar.gz", hash = "sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f"}, + {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, + {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, + {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, + {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, + {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, + {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, + {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash 
= "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, + {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, + {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, + {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, + {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, + {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, 
+ {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, + {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, + {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, + {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, + {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, + {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, + {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, + {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, @@ -3953,2 +4020,2 @@ pip-audit = [ - {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, - {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, + {file = "pip_audit-2.4.13-py3-none-any.whl", hash = "sha256:3ea2fc5c70bf335362d4d81a7bd1084787efac34929e422f79bd8cf8804da2e2"}, + {file = "pip_audit-2.4.13.tar.gz", hash = "sha256:e0c9fe070a16aefdbb9c4d43df6a0183bc951375a293f58264c5e80b5edb57d7"}, @@ -3957,2 +4024,2 @@ pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, + {file = "pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3"}, + {file = "pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526"}, @@ -3961,2 +4028,2 @@ platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = 
"sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, @@ -3977,2 +4044,2 @@ proto-plus = [ - {file = "proto-plus-1.22.1.tar.gz", hash = "sha256:6c7dfd122dfef8019ff654746be4f5b1d9c80bba787fe9611b508dd88be3a2fa"}, - {file = "proto_plus-1.22.1-py3-none-any.whl", hash = "sha256:ea8982669a23c379f74495bc48e3dcb47c822c484ce8ee1d1d7beb339d4e34c5"}, + {file = "proto-plus-1.22.2.tar.gz", hash = "sha256:0e8cda3d5a634d9895b75c573c9352c16486cb75deb0e078b5fda34db4243165"}, + {file = "proto_plus-1.22.2-py3-none-any.whl", hash = "sha256:de34e52d6c9c6fcd704192f09767cb561bb4ee64e70eede20b0834d841f0be4d"}, @@ -4196,2 +4263,2 @@ pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, @@ -4403,2 +4470,2 @@ pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, + {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, @@ -4419,2 +4486,2 @@ pytz = [ - {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, - {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, + {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, + {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, @@ -4666,2 +4733,2 @@ requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, @@ -4686,2 +4753,2 @@ rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.1.0-py3-none-any.whl", hash = "sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"}, + {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"}, @@ -4694,21 +4761,21 @@ scikit-learn = [ - {file = 
"scikit-learn-1.1.3.tar.gz", hash = "sha256:bef51978a51ec19977700fe7b86aecea49c825884f3811756b74a3b152bb4e35"}, - {file = "scikit_learn-1.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8e9dd76c7274055d1acf4526b8efb16a3531c26dcda714a0c16da99bf9d41900"}, - {file = "scikit_learn-1.1.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ee47f68d973cee7009f06edb956f2f5588a0f230f24a2a70175fd0ecf36e2653"}, - {file = "scikit_learn-1.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da5a2e95fef9805b1750e4abda4e834bf8835d26fc709a391543b53feee7bd0e"}, - {file = "scikit_learn-1.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:701181792a28c82fecae12adb5d15d0ecf57bffab7cf4bdbb52c7b3fd428d540"}, - {file = "scikit_learn-1.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:30e27721adc308e8fd9f419f43068e43490005f911edf4476a9e585059fa8a83"}, - {file = "scikit_learn-1.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5699cded6c0685426433c7e5afe0fecad80ec831ec7fa264940e50c796775cc5"}, - {file = "scikit_learn-1.1.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:2ee2c649f2231b68511aabb0dc827edd8936aad682acc6263c34aed11bc95dac"}, - {file = "scikit_learn-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d1c1394e38a3319ace620381f6f23cc807d8780e9915c152449a86fc8f1db21"}, - {file = "scikit_learn-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:250da993701da88bf475e7c5746abf1285ea0ae47e4d0917cd13afd6600bb162"}, - {file = "scikit_learn-1.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:fd3ee69d36d42a7dcbb17e355a5653af5fd241a7dfd9133080b3dde8d9e2aafb"}, - {file = "scikit_learn-1.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f5644663987ee221f5d1f47a593271b966c271c236fe05634e6bdc06041b5a2b"}, - {file = "scikit_learn-1.1.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:748f2bd632d6993e8918d43f1a26c380aeda4e122a88840d4c3a9af99d4239fe"}, - {file = "scikit_learn-1.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd55c6fbef7608dbce1f22baf289dfcc6eb323247daa3c3542f73d389c724786"}, - {file = "scikit_learn-1.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38814f66285318f2e241305cca545eaa9b4126c65aa5dd78c69371f235f78e2b"}, - {file = "scikit_learn-1.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:f4931f2a6c06e02c6c17a05f8ae397e2545965bc7a0a6cb38c8cd7d4fba8624d"}, - {file = "scikit_learn-1.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6785b8a3093329bf90ac01801be5525551728ae73edb11baa175df660820add4"}, - {file = "scikit_learn-1.1.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:28b2bd6a1419acd522ff45d282c8ba23dbccb5338802ab0ee12baa4ade0aba4c"}, - {file = "scikit_learn-1.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23fb9e74b813cc2528b5167d82ed08950b11106ccf50297161875e45152fb311"}, - {file = "scikit_learn-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5d4231af7199531e77da1b78a4cc6b3d960a00b1ec672578ac818aae2b9c35d"}, - {file = "scikit_learn-1.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:4d3a19166d4e1cdfcab975c68f471e046ce01e74c42a9a33fa89a14c2fcedf60"}, + {file = "scikit-learn-1.2.0.tar.gz", hash = "sha256:680b65b3caee469541385d2ca5b03ff70408f6c618c583948312f0d2125df680"}, + {file = "scikit_learn-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1beaa631434d1f17a20b1eef5d842e58c195875d2bc11901a1a70b5fe544745b"}, + {file = 
"scikit_learn-1.2.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d395730f26d8fc752321f1953ddf72647c892d8bed74fad4d7c816ec9b602dfa"}, + {file = "scikit_learn-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd3480c982b9e616b9f76ad8587804d3f4e91b4e2a6752e7dafb8a2e1f541098"}, + {file = "scikit_learn-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:184a42842a4e698ffa4d849b6019de50a77a0aa24d26afa28fa49c9190bb144b"}, + {file = "scikit_learn-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:867023a044fdfe59e5014a7fec7a3086a8928f10b5dce9382eedf4135f6709a2"}, + {file = "scikit_learn-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5546a8894a0616e92489ef995b39a0715829f3df96e801bb55cbf196be0d9649"}, + {file = "scikit_learn-1.2.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:bc7073e025b62c1067cbfb76e69d08650c6b9d7a0e7afdfa20cb92d4afe516f6"}, + {file = "scikit_learn-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0a72237f0c56780cf550df87201a702d3bdcbbb23c6ef7d54c19326fa23f19"}, + {file = "scikit_learn-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1ea0bc1706da45589bcf2490cde6276490a1b88f9af208dbb396fdc3a0babf"}, + {file = "scikit_learn-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f17420a8e3f40129aeb7e0f5ee35822d6178617007bb8f69521a2cefc20d5f00"}, + {file = "scikit_learn-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25ba705ee1600ffc5df1dccd8fae129d7c6836e44ffcbb52d78536c9eaf8fcf9"}, + {file = "scikit_learn-1.2.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:6b63ca2b0643d30fbf9d25d93017ed3fb8351f31175d82d104bfec60cba7bb87"}, + {file = "scikit_learn-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c772fa8c64776ad769fd764752c8452844307adcf10dee3adcc43988260f21"}, + {file = "scikit_learn-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0834e4cec2a2e0d8978f39cb8fe1cad3be6c27a47927e1774bf5737ea65ec228"}, + {file = "scikit_learn-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:da29d2e379c396a63af5ed4b671ad2005cd690ac373a23bee5a0f66504e05272"}, + {file = "scikit_learn-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:23a88883ca60c571a06278e4726b3b51b3709cfa4c93cacbf5568b22ba960899"}, + {file = "scikit_learn-1.2.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:40f3ff68c505cb9d1f3693397c73991875d609da905087e00e7b4477645ec67b"}, + {file = "scikit_learn-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9535e867281ae6987bb80620ba14cf1649e936bfe45f48727b978b7a2dbe835"}, + {file = "scikit_learn-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de897720173b26842e21bed54362f5294e282422116b61cd931d4f5d870b9855"}, + {file = "scikit_learn-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ceb0008f345188aa236e49c973dc160b9ed504a3abd7b321a0ecabcb669be0bd"}, @@ -4717,21 +4784,21 @@ scipy = [ - {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"}, - {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"}, - {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a72d885fa44247f92743fc20732ae55564ff2a519e8302fb7e18717c5355a8b"}, - {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:d01e1dd7b15bd2449c8bfc6b7cc67d630700ed655654f0dfcf121600bad205c9"}, - {file = "scipy-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:68239b6aa6f9c593da8be1509a05cb7f9efe98b80f43a5861cd24c7557e98523"}, - {file = "scipy-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b41bc822679ad1c9a5f023bc93f6d0543129ca0f37c1ce294dd9d386f0a21096"}, - {file = "scipy-1.9.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:90453d2b93ea82a9f434e4e1cba043e779ff67b92f7a0e85d05d286a3625df3c"}, - {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c06e62a390a9167da60bedd4575a14c1f58ca9dfde59830fc42e5197283dab"}, - {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abaf921531b5aeaafced90157db505e10345e45038c39e5d9b6c7922d68085cb"}, - {file = "scipy-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:06d2e1b4c491dc7d8eacea139a1b0b295f74e1a1a0f704c375028f8320d16e31"}, - {file = "scipy-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a04cd7d0d3eff6ea4719371cbc44df31411862b9646db617c99718ff68d4840"}, - {file = "scipy-1.9.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:545c83ffb518094d8c9d83cce216c0c32f8c04aaf28b92cc8283eda0685162d5"}, - {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d54222d7a3ba6022fdf5773931b5d7c56efe41ede7f7128c7b1637700409108"}, - {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff3a5295234037e39500d35316a4c5794739433528310e117b8a9a0c76d20fc"}, - {file = "scipy-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:2318bef588acc7a574f5bfdff9c172d0b1bf2c8143d9582e05f878e580a3781e"}, - {file = "scipy-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d644a64e174c16cb4b2e41dfea6af722053e83d066da7343f333a54dae9bc31c"}, - {file = "scipy-1.9.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:da8245491d73ed0a994ed9c2e380fd058ce2fa8a18da204681f2fe1f57f98f95"}, - {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4db5b30849606a95dcf519763dd3ab6fe9bd91df49eba517359e450a7d80ce2e"}, - {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c68db6b290cbd4049012990d7fe71a2abd9ffbe82c0056ebe0f01df8be5436b0"}, - {file = "scipy-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:5b88e6d91ad9d59478fafe92a7c757d00c59e3bdc3331be8ada76a4f8d683f58"}, - {file = "scipy-1.9.3.tar.gz", hash = "sha256:fbc5c05c85c1a02be77b1ff591087c83bc44579c6d2bd9fb798bb64ea5e1a027"}, + {file = "scipy-1.10.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:b901b423c91281a974f6cd1c36f5c6c523e665b5a6d5e80fcb2334e14670eefd"}, + {file = "scipy-1.10.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:16ba05d3d1b9f2141004f3f36888e05894a525960b07f4c2bfc0456b955a00be"}, + {file = "scipy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:151f066fe7d6653c3ffefd489497b8fa66d7316e3e0d0c0f7ff6acca1b802809"}, + {file = "scipy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f9ea0a37aca111a407cb98aa4e8dfde6e5d9333bae06dfa5d938d14c80bb5c3"}, + {file = "scipy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:27e548276b5a88b51212b61f6dda49a24acf5d770dff940bd372b3f7ced8c6c2"}, + {file = "scipy-1.10.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:42ab8b9e7dc1ebe248e55f54eea5307b6ab15011a7883367af48dd781d1312e4"}, + {file = "scipy-1.10.0-cp311-cp311-macosx_12_0_arm64.whl", hash = 
"sha256:e096b062d2efdea57f972d232358cb068413dc54eec4f24158bcbb5cb8bddfd8"}, + {file = "scipy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df25a28bd22c990b22129d3c637fd5c3be4b7c94f975dca909d8bab3309b694"}, + {file = "scipy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad449db4e0820e4b42baccefc98ec772ad7818dcbc9e28b85aa05a536b0f1a2"}, + {file = "scipy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:6faf86ef7717891195ae0537e48da7524d30bc3b828b30c9b115d04ea42f076f"}, + {file = "scipy-1.10.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:4bd0e3278126bc882d10414436e58fa3f1eca0aa88b534fcbf80ed47e854f46c"}, + {file = "scipy-1.10.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:38bfbd18dcc69eeb589811e77fae552fa923067fdfbb2e171c9eac749885f210"}, + {file = "scipy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ab2a58064836632e2cec31ca197d3695c86b066bc4818052b3f5381bfd2a728"}, + {file = "scipy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd7a30970c29d9768a7164f564d1fbf2842bfc77b7d114a99bc32703ce0bf48"}, + {file = "scipy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:9b878c671655864af59c108c20e4da1e796154bd78c0ed6bb02bc41c84625686"}, + {file = "scipy-1.10.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:3afcbddb4488ac950ce1147e7580178b333a29cd43524c689b2e3543a080a2c8"}, + {file = "scipy-1.10.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:6e4497e5142f325a5423ff5fda2fff5b5d953da028637ff7c704378c8c284ea7"}, + {file = "scipy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:441cab2166607c82e6d7a8683779cb89ba0f475b983c7e4ab88f3668e268c143"}, + {file = "scipy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0490dc499fe23e4be35b8b6dd1e60a4a34f0c4adb30ac671e6332446b3cbbb5a"}, + {file = "scipy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:954ff69d2d1bf666b794c1d7216e0a746c9d9289096a64ab3355a17c7c59db54"}, + {file = "scipy-1.10.0.tar.gz", hash = "sha256:c8b3cbc636a87a89b770c6afc999baa6bcbb01691b5ccbbc1b1791c7c0a07540"}, @@ -4740,2 +4807,2 @@ setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"}, + {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"}, @@ -4772 +4839 @@ tensorboard = [ - {file = "tensorboard-2.10.1-py3-none-any.whl", hash = "sha256:fb9222c1750e2fa35ef170d998a1e229f626eeced3004494a8849c88c15d8c1c"}, + {file = "tensorboard-2.11.2-py3-none-any.whl", hash = "sha256:cbaa2210c375f3af1509f8571360a19ccc3ded1d9641533414874b5deca47e89"}, @@ -4783,16 +4850,16 @@ tensorflow = [ - {file = "tensorflow-2.10.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:dc3587dfa714be711d2681d5e2fb59037b18e83e692f084db49bce31b6268d15"}, - {file = "tensorflow-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3cab933757eb0c204dc4cf34d031939e33cae8f97a7aaef00a12678129b17f"}, - {file = "tensorflow-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f1d579b849afaea7b10f7693dc43b1d07321d279a016f01e2ddfe971d0d8af"}, - {file = 
"tensorflow-2.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a6049664f9a0d14b0a4a7e6f058be87b2d8c27be826d7dd9a870ff03683fbc0b"}, - {file = "tensorflow-2.10.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ae77b9fcf826cdb05e8c3c6cfcd0ce10b9adcf2ffe952e159cf6ef182f0f3682"}, - {file = "tensorflow-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a8f6f1344cab3ef7e6c794b3e252bbedc764c198be645a5b396c3b67b8bc093"}, - {file = "tensorflow-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:886180162db50ac7c5f8e2affbeae32588c97d08e49089135c71052392913dca"}, - {file = "tensorflow-2.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:981b08964e132de71a37b98b6d5ec4204aa51bc9529ecc7fefcd01c33d7e7d53"}, - {file = "tensorflow-2.10.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f1c11fad08aa24f4838caf0aa1fba694bfaa323168d3e42e58387f5239943b56"}, - {file = "tensorflow-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7603cef40bee34cebdfbf264f9ce14c25529356f581f6fb5605f567efd92e07"}, - {file = "tensorflow-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee057aa57957b1a689c181bd406c30cbe152b7893c484fe6a26fcce6750f665"}, - {file = "tensorflow-2.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:5ef5e562e6baa9dc9f58db324668e7991caec546dfd5ed50647c734cd0d2daab"}, - {file = "tensorflow-2.10.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:18895381a123de287f94b1f76ceb56e86227a13e414a2928ab470d7c5b6b4c52"}, - {file = "tensorflow-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d07439c32b579b4c0251b494002e85954b37447286f2e65554f3ad940e496ff"}, - {file = "tensorflow-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab2d33039fc8b340feb3d1f56db2c3d4bb25f059089a42dbe067b879add61815"}, - {file = "tensorflow-2.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:153111af1d773033264f8591f5deffece180a1f16935b579f43edd83acb17584"}, + {file = "tensorflow-2.11.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6c049fec6c2040685d6f43a63e17ccc5d6b0abc16b70cc6f5e7d691262b5d2d0"}, + {file = "tensorflow-2.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcc8380820cea8f68f6c90b8aee5432e8537e5bb9ec79ac61a98e6a9a02c7d40"}, + {file = "tensorflow-2.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d973458241c8771bf95d4ba68ad5d67b094f72dd181c2d562ffab538c1b0dad7"}, + {file = "tensorflow-2.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:d470b772ee3c291a8c7be2331e7c379e0c338223c0bf532f5906d4556f17580d"}, + {file = "tensorflow-2.11.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:d29c1179149fa469ad68234c52c83081d037ead243f90e826074e2563a0f938a"}, + {file = "tensorflow-2.11.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cdba2fce00d6c924470d4fb65d5e95a4b6571a863860608c0c13f0393f4ca0d"}, + {file = "tensorflow-2.11.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2ab20f93d2b52a44b414ec6dcf82aa12110e90e0920039a27108de28ae2728"}, + {file = "tensorflow-2.11.0-cp37-cp37m-win_amd64.whl", hash = "sha256:445510f092f7827e1f60f59b8bfb58e664aaf05d07daaa21c5735a7f76ca2b25"}, + {file = "tensorflow-2.11.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:056d29f2212342536ce3856aa47910a2515eb97ec0a6cc29ed47fc4be1369ec8"}, + {file = "tensorflow-2.11.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:17b29d6d360fad545ab1127db52592efd3f19ac55c1a45e5014da328ae867ab4"}, + {file = "tensorflow-2.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:335ab5cccd7a1c46e3d89d9d46913f0715e8032df8d7438f9743b3fb97b39f69"}, + {file = "tensorflow-2.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:d48da37c8ae711eb38047a56a052ca8bb4ee018a91a479e42b7a8d117628c32e"}, + {file = "tensorflow-2.11.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:d9cf25bca641f2e5c77caa3bfd8dd6b892a7aec0695c54d2a7c9f52a54a8d487"}, + {file = "tensorflow-2.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d28f9691ebc48c0075e271023b3f147ae2bc29a3d3a7f42d45019c6b4a700d2"}, + {file = "tensorflow-2.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:276a44210d956701899dc78ad0aa116a0071f22fb0bcc1ea6bb59f7646b08d11"}, + {file = "tensorflow-2.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:cc3444fe1d58c65a195a69656bf56015bf19dc2916da607d784b0a1e215ec008"}, @@ -4801 +4868 @@ tensorflow-estimator = [ - {file = "tensorflow_estimator-2.10.0-py2.py3-none-any.whl", hash = "sha256:f324ea17cd57f16e33bf188711d5077e6b2e5f5a12c328d6e01a07b23888edcd"}, + {file = "tensorflow_estimator-2.11.0-py2.py3-none-any.whl", hash = "sha256:ea3b64acfff3d9a244f06178c9bdedcbdd3f125b67d0888dba8229498d06468b"}, @@ -4804,15 +4871,19 @@ tensorflow-io-gcs-filesystem = [ - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:22753dc28c949bfaf29b573ee376370762c88d80330fe95cfb291261eb5e927a"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:52988659f405166df79905e9859bc84ae2a71e3ff61522ba32a95e4dce8e66d2"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:698d7f89e09812b9afeb47c3860797343a22f997c64ab9dab98132c61daa8a7d"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:bbf245883aa52ec687b66d0fcbe0f5f0a92d98c0b1c53e6a736039a3548d29a1"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6d95f306ff225c5053fd06deeab3e3a2716357923cb40c44d566c11be779caa3"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:5fbef5836e70026245d8d9e692c44dae2c6dbc208c743d01f5b7a2978d6b6bc6"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:00cf6a92f1f9f90b2ba2d728870bcd2a70b116316d0817ab0b91dd390c25b3fd"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f76cbe1a784841c223f6861e5f6c7e53aa6232cb626d57e76881a0638c365de6"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c5d99f56c12a349905ff684142e4d2df06ae68ecf50c4aad5449a5f81731d858"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:b6e2d275020fb4d1a952cd3fa546483f4e46ad91d64e90d3458e5ca3d12f6477"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a6670e0da16c884267e896ea5c3334d6fd319bd6ff7cf917043a9f3b2babb1b3"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:bfed720fc691d3f45802a7bed420716805aef0939c11cebf25798906201f626e"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = 
"sha256:cc062ce13ec95fb64b1fd426818a6d2b0e5be9692bc0e43a19cce115b6da4336"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:366e1eff8dbd6b64333d7061e2a8efd081ae4742614f717ced08d8cc9379eb50"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:9484893779324b2d34874b0aacf3b824eb4f22d782e75df029cbccab2e607974"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:d8eb242b118721c8a23d598af69c25ad450c1e18bd1cd7ef7e8274ae0e7781ca"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c826154e0c6bd9a1a0395a1b985976ac255a3d79d147d3eb10343b6d15710267"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9db8dc3f2c4ddfdf02f33a492600be35d0bca085aa12121a5feef173e6b5914e"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:7ff4b18f1a74e1a56603fa204cf82b1af5b24ad18c579692c487b4fb4a2baec8"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:571fc6ba4960f3a749a362d487b60e248eb43f0abcfb0ace4f04ddb91ae04faf"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a6297b68677a17ce388594fcf76c70718b837dba59e858280898521a858a8e4c"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:ff107ac28d56bdd4c50ac69b18cc237d3a9342be8c2d11e458e19a0fac31fb9d"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:6617054b8ac75cf2f19ec24ddf3a696a500758d1f755b847f3ba42aec6ad7b9e"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec87a475a4024bc8c4427c6cbeba009fd76b1b95ad764755fdf958c234470acd"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:931e225b406d67d0a8a8f549242a0a1d173a02e0179d86b88351de19c393e06f"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp37-cp37m-win_amd64.whl", hash = "sha256:975b674d7816c08cf47a50cfc5b73a36ca0b3ef32d8e37a788b7cae38b9e1549"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e114d672fc565985468d6a26a1e54e2f0002ab76c711f49a56d948ad05718c80"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:630372fe2f1c05b57c4ad0c9050b570bc67e474ec513cf046832f8889002fab7"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0736853c35030e027c0f9577f07355e5cd21463321008ef4ee8b390f913fdd6"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:049b21d5056f57952b0450c2bac9c2bf2fabc4bbb571470abf0cba4243a41dfe"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:655af38843d83ef322db190d162e21060ca143935a04f4b64b29f60c1c4dc073"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8543e14b2f32771e7a7eca7a3d34a8fbdf1f4e9ae7d346bcacff011f43c693cb"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ef5656932c1f4d4e4d366cdae469562344ecd38cd51de70ebf60e68ee0834da1"}, + {file = "tensorflow_io_gcs_filesystem-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:bcb7405474ed136b3a2e2181a732968c52ba9f35a03fe296acc9f1ec4e7044ee"}, @@ -4821,6 +4892,6 @@ tensorflow-macos = [ - {file = "tensorflow_macos-2.10.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:dfd1dd478b3ae01e8d578c38083bef68bc838ceaa05a813b6788fe9e6ec19140"}, - {file = "tensorflow_macos-2.10.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:aa074b5442d3411e5416c5112531d8b78a8c469ca92fa41c0e0cf14428608bf3"}, - {file = "tensorflow_macos-2.10.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:e15ab56f39f3d2e7c07a72a5969f025e259b1d3fcb9c3f7217f17b62581c33a8"}, - {file = "tensorflow_macos-2.10.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:00be07ffcf8b6288fa3dd9a27a3b6dd0f6c85af5c3109451a1b7e720bb817d14"}, - {file = "tensorflow_macos-2.10.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f2ec603c5496c25fb1bcda8eb4166423bf023bfb7ae6cbdec0be8796ca67e866"}, - {file = "tensorflow_macos-2.10.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:3177a8a97149f78748eeb20009aa3dc27ae2c112ab6380886ec8620bbcb70f19"}, + {file = "tensorflow_macos-2.11.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0bdbd1bb564d01bd735d6d11451f0658c3dd8187369ee9dd3ed6de6bbdd6df53"}, + {file = "tensorflow_macos-2.11.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:66eb67915cf418eddd3b4c158132609efd50895fa09fd55e4b2f14a3ab85bd34"}, + {file = "tensorflow_macos-2.11.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:6810731e2c8353123f6c9c944d2765b58a2226e7eb9fec1e360f73977c6c6aa4"}, + {file = "tensorflow_macos-2.11.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:881b36d97b67d24197250a091c52c31db14aecfdbf1ac20418a148ec37321978"}, + {file = "tensorflow_macos-2.11.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8d56b0d0bd140008b0cc4877804c9c310e1e2735444fa99bc7c88ffb2909153d"}, + {file = "tensorflow_macos-2.11.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:db97cd91b905bd01069069f07325a2a291705222eb4914148b9574090a5815ae"}, @@ -4829,2 +4900,2 @@ termcolor = [ - {file = "termcolor-2.1.1-py3-none-any.whl", hash = "sha256:fa852e957f97252205e105dd55bbc23b419a70fec0085708fc0515e399f304fd"}, - {file = "termcolor-2.1.1.tar.gz", hash = "sha256:67cee2009adc6449c650f6bcf3bdeed00c8ba53a8cda5362733c53e0a39fb70b"}, + {file = "termcolor-2.2.0-py3-none-any.whl", hash = "sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"}, + {file = "termcolor-2.2.0.tar.gz", hash = "sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"}, @@ -4894,19 +4965,21 @@ torch = [ - {file = "torch-1.10.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:8f3fd2e3ffc3bb867133fdf7fbcc8a0bb2e62a5c0696396f51856f5abf9045a8"}, - {file = "torch-1.10.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:258a0729fb77a3457d5822d84b536057cd119b08049a8d3c41dc3dcdeb48d56e"}, - {file = "torch-1.10.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:935e5ac804c5093c79f23a7e6ca5b912c166071aa9d8b4a0a3d6a85126d6a47b"}, - {file = "torch-1.10.2-cp36-cp36m-win_amd64.whl", hash = "sha256:65fd02ed889c63fd82bf1a440c5a94c1310c29f3e6f9f62add416d34da355d97"}, - {file = "torch-1.10.2-cp36-none-macosx_10_9_x86_64.whl", hash = "sha256:6a81f886823bbd15edc2dc0908fa214070df61c9f7ab8831f0a03630275cca5a"}, - {file = "torch-1.10.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3eee3cf53c1f8fb3f1fe107a22025a8501fc6440d14e09599ba7153002531f84"}, - {file = 
"torch-1.10.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ef99b8cca5f9358119b07956915faf6e7906f433ab4a603c160ae9de88918371"}, - {file = "torch-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d43bc3f3a2d89ae185ef96d903c935c335219231e57685658648396984e2a67a"}, - {file = "torch-1.10.2-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:6da1b877880435440a5aa9678ef0f01986d4886416844db1d97ebfb7fd1778d0"}, - {file = "torch-1.10.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ab77a9f838874f295ed5410c0686fa22547456e0116efb281c66ef5f9d46fe28"}, - {file = "torch-1.10.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9ef4c004f9e5168bd1c1930c6aff25fed5b097de81db6271ffbb2e4fb8b89319"}, - {file = "torch-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:376fc18407add20daa6bbaaffc5a5e06d733abe53bcbd60ef2532bfed34bc091"}, - {file = "torch-1.10.2-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:f281438ee99bd72ad65c0bba1026a32e45c3b636bc067fc145ad291e9ea2faab"}, - {file = "torch-1.10.2-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:3592d3dd62b32760c82624e7586222747fe2281240e8653970b35f1d6d4a434c"}, - {file = "torch-1.10.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fbaf18c1b3e0b31af194a9d853e3739464cf982d279df9d34dd18f1c2a471878"}, - {file = "torch-1.10.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:97b7b0c667e8b0dd1fc70137a36e0a4841ec10ef850bda60500ad066bef3e2de"}, - {file = "torch-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:901b52787baeb2e9e1357ca7037da0028bc6ad743f530e0040ae96ef8e27156c"}, - {file = "torch-1.10.2-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:5b68e9108bd7ebd99eee941686046c517cfaac5331f757bcf440fe02f2e3ced1"}, - {file = "torch-1.10.2-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:b07ef01e36b716d0d65ca60c4db0ac9d094a0e797d9b55290da4dcda91463b6c"}, + {file = "torch-1.13.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:fd12043868a34a8da7d490bf6db66991108b00ffbeecb034228bfcbbd4197143"}, + {file = "torch-1.13.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d9fe785d375f2e26a5d5eba5de91f89e6a3be5d11efb497e76705fdf93fa3c2e"}, + {file = "torch-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:98124598cdff4c287dbf50f53fb455f0c1e3a88022b39648102957f3445e9b76"}, + {file = "torch-1.13.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:393a6273c832e047581063fb74335ff50b4c566217019cc6ace318cd79eb0566"}, + {file = "torch-1.13.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:0122806b111b949d21fa1a5f9764d1fd2fcc4a47cb7f8ff914204fd4fc752ed5"}, + {file = "torch-1.13.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:22128502fd8f5b25ac1cd849ecb64a418382ae81dd4ce2b5cebaa09ab15b0d9b"}, + {file = "torch-1.13.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:76024be052b659ac1304ab8475ab03ea0a12124c3e7626282c9c86798ac7bc11"}, + {file = "torch-1.13.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ea8dda84d796094eb8709df0fcd6b56dc20b58fdd6bc4e8d7109930dafc8e419"}, + {file = "torch-1.13.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2ee7b81e9c457252bddd7d3da66fb1f619a5d12c24d7074de91c4ddafb832c93"}, + {file = "torch-1.13.1-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:0d9b8061048cfb78e675b9d2ea8503bfe30db43d583599ae8626b1263a0c1380"}, + {file = "torch-1.13.1-cp37-none-macosx_11_0_arm64.whl", hash = "sha256:f402ca80b66e9fbd661ed4287d7553f7f3899d9ab54bf5c67faada1555abde28"}, + {file = "torch-1.13.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:727dbf00e2cf858052364c0e2a496684b9cb5aa01dc8a8bc8bbb7c54502bdcdd"}, + {file = 
"torch-1.13.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:df8434b0695e9ceb8cc70650afc1310d8ba949e6db2a0525ddd9c3b2b181e5fe"}, + {file = "torch-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:5e1e722a41f52a3f26f0c4fcec227e02c6c42f7c094f32e49d4beef7d1e213ea"}, + {file = "torch-1.13.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:33e67eea526e0bbb9151263e65417a9ef2d8fa53cbe628e87310060c9dcfa312"}, + {file = "torch-1.13.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:eeeb204d30fd40af6a2d80879b46a7efbe3cf43cdbeb8838dd4f3d126cc90b2b"}, + {file = "torch-1.13.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:50ff5e76d70074f6653d191fe4f6a42fdbe0cf942fbe2a3af0b75eaa414ac038"}, + {file = "torch-1.13.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2c3581a3fd81eb1f0f22997cddffea569fea53bafa372b2c0471db373b26aafc"}, + {file = "torch-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:0aa46f0ac95050c604bcf9ef71da9f1172e5037fdf2ebe051962d47b123848e7"}, + {file = "torch-1.13.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6930791efa8757cb6974af73d4996b6b50c592882a324b8fb0589c6a9ba2ddaf"}, + {file = "torch-1.13.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:e0df902a7c7dd6c795698532ee5970ce898672625635d885eade9976e5a04949"}, @@ -4915,18 +4988,19 @@ torchaudio = [ - {file = "torchaudio-0.10.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:fd7ea7dfe52456621e1fe8d40129d1d1e765a444fd16b43c494732835c23f2b0"}, - {file = "torchaudio-0.10.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6619b0e85bc47e559598c12d98aac7cfeb63e0910c121ef3e0611ff17d3f5753"}, - {file = "torchaudio-0.10.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:728b4bf7e9bb6f4d44b397e6f8ffc74e6588cff7c52cd03e8b76759fa895d46a"}, - {file = "torchaudio-0.10.2-cp36-cp36m-win_amd64.whl", hash = "sha256:e7b1463a7ab1322f0fb0b35b2e5aee6a8bde24709d2c1135b4db5ec4e72a94a8"}, - {file = "torchaudio-0.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f22f1130705015e33e3b40f840cedcaadabab08eb51ee71f15ad27746ce7be06"}, - {file = "torchaudio-0.10.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:557de9a84b4c4b04f83f1ef3abe6d2bc37f4e9ee7bd149b44568d5e3f145edb9"}, - {file = "torchaudio-0.10.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:57ef69529c4307db35f5fd5dd1bf295af1ae4cc5c82d82b87753ebe99ac91332"}, - {file = "torchaudio-0.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd7eb11904696b62a1948cc6bcb75628bfa7830b808b928e362368506997b285"}, - {file = "torchaudio-0.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7be36f12ed5b97a4b774257dba4e5f78f9e84edcd534f28ffdf6892c919aada7"}, - {file = "torchaudio-0.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:05e2f56a310d9914b434e49b4b77483d56ca4820d194123c9838ac61e14455ff"}, - {file = "torchaudio-0.10.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:773db781e7a8bcde8e171121ec0349833ca662e5338025f5f5a4d8846f91cacc"}, - {file = "torchaudio-0.10.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b4a8d319b85e0964f4def2a7a391feb5fcab1c08f71e790941e3826674b345c6"}, - {file = "torchaudio-0.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:e7556773ab4b2bbbb755cd84497db7e7ebf73fe05811ede5c51a560ea05a56b0"}, - {file = "torchaudio-0.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b5663ddd40cee794c8c59cf61c3ee9108832152e11956f766610f92f87f21244"}, - {file = "torchaudio-0.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:677cf720f52af0e2cbde105d8ab79acfdb8c4590880a35796005b6b09da7d767"}, - {file = "torchaudio-0.10.2-cp39-cp39-manylinux1_x86_64.whl", hash 
= "sha256:98f6ad7d1b7d8546e3f0eab55147a88d55a12c84b5fd3bd9b1516ffb97a5b8ec"}, - {file = "torchaudio-0.10.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ea40d7969693a9be92d2df5db3f2cfacf4b9d696a2770ea3735b8596fd8c82b9"}, - {file = "torchaudio-0.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c09e24489d6ff9765614c6dd7c0a3771ded338f879a9bdadd284a854fb8bf374"}, + {file = "torchaudio-0.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e0f3dc6699506521364266704e6bf89d0d0579fd435d12c5c2f5858d52de4fa"}, + {file = "torchaudio-0.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ec72a17d4d2178829e7780682999b535cf57fe160d0c20b0d6bdc1ad1a87c4dd"}, + {file = "torchaudio-0.13.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:62e9b5c260a27231d905588b72d2e2984ff9cdbb557af86eb178982fd265198d"}, + {file = "torchaudio-0.13.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:2e47562cdcdd47cb8ed86a3cf053b7067cc9e88340f4550ae73d790ddbc12f21"}, + {file = "torchaudio-0.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5f2fc60206aa687eadc8cfb7c167784678936fbad13ccc583794fba3d6f77e1b"}, + {file = "torchaudio-0.13.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fa7cc1a2b3056fc6ceee6d60dbcdef58955a7ca534667d0db9b4fc9efa087a1"}, + {file = "torchaudio-0.13.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:901a6d65750fc3fb2c656ae274cc61599aa7d5472361fbc206e381a310d619d1"}, + {file = "torchaudio-0.13.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b798447283551def4d911dd6acb2c4bc08214e95f677f56c4f623fc99a90eff"}, + {file = "torchaudio-0.13.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6bb94deeaf05fab9ff2f34906d3c36b46032420c3a08253d8c452999c235119c"}, + {file = "torchaudio-0.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42ce5c66d304bc2cd68338916b8223e322e09a84dcbd9228814ef36bc477a37b"}, + {file = "torchaudio-0.13.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b093b3e7661c85168ec9dde2cf97345965ea0931d3d2a7e78bd409221e6d6998"}, + {file = "torchaudio-0.13.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:167f77ef385592a5af6f4e2ad1630a42ca1b70f905762fcd62e13dd4f163bdcf"}, + {file = "torchaudio-0.13.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3c48bcff00eae8180f87f58d1c9e7e9fd8c4cb7eb3ea8817935fb6048d152bc7"}, + {file = "torchaudio-0.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:5de44b6b96a8d7a05650ef7377b2386650ddce92551d7dc02e05e7002aee5fd2"}, + {file = "torchaudio-0.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9d2170540de32ae031aab3936129868e896ea041617b6d6692dde6aa2dfb0a23"}, + {file = "torchaudio-0.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:91fcfbf47000402d12bff2624e6220a0fd3b8ca8ee6ff51edf5945ec39ab0a7f"}, + {file = "torchaudio-0.13.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:32592088b48dfcd2ca247ad5d081a9e0c61de0caabb993d68bac779326456d8d"}, + {file = "torchaudio-0.13.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3023aeb5c191047bef1681a3741bffd4a2164b58a64cad24dd37da5e1ac2d1f1"}, + {file = "torchaudio-0.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:418fbf93ba77b9742b05b76561be4fe7e8ded27cfe414828624765986b30ce5a"}, @@ -4984,2 +5058,2 @@ types-pytz = [ - {file = "types-pytz-2022.6.0.1.tar.gz", hash = "sha256:d078196374d1277e9f9984d49373ea043cf2c64d5d5c491fbc86c258557bd46f"}, - {file = "types_pytz-2022.6.0.1-py3-none-any.whl", hash = "sha256:bea605ce5d5a5d52a8e1afd7656c9b42476e18a0f888de6be91587355313ddf4"}, + {file = "types-pytz-2022.7.1.0.tar.gz", hash = 
"sha256:918f9c3e7a950ba7e7d6f84b18a7cacabc8886cb7125fb1927ff1c752b4b59de"}, + {file = "types_pytz-2022.7.1.0-py3-none-any.whl", hash = "sha256:10ec7d009a02340f1cecd654ac03f0c29b6088a03b63d164401fc52df45936b2"}, @@ -4988,2 +5062,2 @@ types-requests = [ - {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"}, - {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, + {file = "types-requests-2.28.11.7.tar.gz", hash = "sha256:0ae38633734990d019b80f5463dfa164ebd3581998ac8435f526da6fe4d598c3"}, + {file = "types_requests-2.28.11.7-py3-none-any.whl", hash = "sha256:b6a2fca8109f4fdba33052f11ed86102bddb2338519e1827387137fefc66a98b"}, @@ -5000,65 +5074,65 @@ ujson = [ - {file = "ujson-5.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e"}, - {file = "ujson-5.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7"}, - {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3"}, - {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e"}, - {file = "ujson-5.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f"}, - {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9"}, - {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b"}, - {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059"}, - {file = "ujson-5.6.0-cp310-cp310-win32.whl", hash = "sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f"}, - {file = "ujson-5.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169"}, - {file = "ujson-5.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56"}, - {file = "ujson-5.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82"}, - {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc"}, - {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6"}, - {file = "ujson-5.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275"}, - {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a"}, - {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c"}, - {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679"}, - {file = "ujson-5.6.0-cp311-cp311-win32.whl", hash = "sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490"}, - {file = "ujson-5.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7"}, - {file = "ujson-5.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e"}, - {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851"}, - {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf"}, - {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085"}, - {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035"}, - {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b"}, - {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458"}, - {file = "ujson-5.6.0-cp37-cp37m-win32.whl", hash = "sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b"}, - {file = "ujson-5.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf"}, - {file = "ujson-5.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a"}, - {file = "ujson-5.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847"}, - {file = "ujson-5.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888"}, - {file = "ujson-5.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b"}, - {file = "ujson-5.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555"}, - {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e"}, - {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d"}, - {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522"}, - {file = "ujson-5.6.0-cp38-cp38-win32.whl", hash = "sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e"}, - {file = "ujson-5.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e"}, - {file = "ujson-5.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99"}, - {file = "ujson-5.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2"}, - {file = 
"ujson-5.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab"}, - {file = "ujson-5.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d"}, - {file = "ujson-5.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983"}, - {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6"}, - {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709"}, - {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c"}, - {file = "ujson-5.6.0-cp39-cp39-win32.whl", hash = "sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490"}, - {file = "ujson-5.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6"}, - {file = "ujson-5.6.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804"}, - {file = "ujson-5.6.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f"}, - {file = "ujson-5.6.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765"}, - {file = "ujson-5.6.0.tar.gz", hash = "sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04"}, + {file = "ujson-5.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5eba5e69e4361ac3a311cf44fa71bc619361b6e0626768a494771aacd1c2f09b"}, + {file = "ujson-5.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aae4d9e1b4c7b61780f0a006c897a4a1904f862fdab1abb3ea8f45bd11aa58f3"}, + {file = "ujson-5.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2e43ccdba1cb5c6d3448eadf6fc0dae7be6c77e357a3abc968d1b44e265866d"}, + {file = "ujson-5.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54384ce4920a6d35fa9ea8e580bc6d359e3eb961fa7e43f46c78e3ed162d56ff"}, + {file = "ujson-5.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24ad1aa7fc4e4caa41d3d343512ce68e41411fb92adf7f434a4d4b3749dc8f58"}, + {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:afff311e9f065a8f03c3753db7011bae7beb73a66189c7ea5fcb0456b7041ea4"}, + {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e80f0d03e7e8646fc3d79ed2d875cebd4c83846e129737fdc4c2532dbd43d9e"}, + {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:137831d8a0db302fb6828ee21c67ad63ac537bddc4376e1aab1c8573756ee21c"}, + {file = "ujson-5.7.0-cp310-cp310-win32.whl", hash = "sha256:7df3fd35ebc14dafeea031038a99232b32f53fa4c3ecddb8bed132a43eefb8ad"}, + {file = "ujson-5.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:af4639f684f425177d09ae409c07602c4096a6287027469157bfb6f83e01448b"}, + {file = "ujson-5.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b0f2680ce8a70f77f5d70aaf3f013d53e6af6d7058727a35d8ceb4a71cdd4e9"}, + {file = "ujson-5.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a19fd8e7d8cc58a169bea99fed5666023adf707a536d8f7b0a3c51dd498abf"}, + {file = "ujson-5.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6abb8e6d8f1ae72f0ed18287245f5b6d40094e2656d1eab6d99d666361514074"}, + {file = "ujson-5.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8cd622c069368d5074bd93817b31bdb02f8d818e57c29e206f10a1f9c6337dd"}, + {file = "ujson-5.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14f9082669f90e18e64792b3fd0bf19f2b15e7fe467534a35ea4b53f3bf4b755"}, + {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7ff6ebb43bc81b057724e89550b13c9a30eda0f29c2f506f8b009895438f5a6"}, + {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f7f241488879d91a136b299e0c4ce091996c684a53775e63bb442d1a8e9ae22a"}, + {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5593263a7fcfb934107444bcfba9dde8145b282de0ee9f61e285e59a916dda0f"}, + {file = "ujson-5.7.0-cp311-cp311-win32.whl", hash = "sha256:26c2b32b489c393106e9cb68d0a02e1a7b9d05a07429d875c46b94ee8405bdb7"}, + {file = "ujson-5.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:ed24406454bb5a31df18f0a423ae14beb27b28cdfa34f6268e7ebddf23da807e"}, + {file = "ujson-5.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18679484e3bf9926342b1c43a3bd640f93a9eeeba19ef3d21993af7b0c44785d"}, + {file = 
"ujson-5.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee295761e1c6c30400641f0a20d381633d7622633cdf83a194f3c876a0e4b7e"}, + {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b738282e12a05f400b291966630a98d622da0938caa4bc93cf65adb5f4281c60"}, + {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00343501dbaa5172e78ef0e37f9ebd08040110e11c12420ff7c1f9f0332d939e"}, + {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c0d1f7c3908357ee100aa64c4d1cf91edf99c40ac0069422a4fd5fd23b263263"}, + {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a5d2f44331cf04689eafac7a6596c71d6657967c07ac700b0ae1c921178645da"}, + {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:16b2254a77b310f118717715259a196662baa6b1f63b1a642d12ab1ff998c3d7"}, + {file = "ujson-5.7.0-cp37-cp37m-win32.whl", hash = "sha256:6faf46fa100b2b89e4db47206cf8a1ffb41542cdd34dde615b2fc2288954f194"}, + {file = "ujson-5.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ff0004c3f5a9a6574689a553d1b7819d1a496b4f005a7451f339dc2d9f4cf98c"}, + {file = "ujson-5.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:75204a1dd7ec6158c8db85a2f14a68d2143503f4bafb9a00b63fe09d35762a5e"}, + {file = "ujson-5.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7312731c7826e6c99cdd3ac503cd9acd300598e7a80bcf41f604fee5f49f566c"}, + {file = "ujson-5.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b9dc5a90e2149643df7f23634fe202fed5ebc787a2a1be95cf23632b4d90651"}, + {file = "ujson-5.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a6961fc48821d84b1198a09516e396d56551e910d489692126e90bf4887d29"}, + {file = "ujson-5.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b01a9af52a0d5c46b2c68e3f258fdef2eacaa0ce6ae3e9eb97983f5b1166edb6"}, + {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7316d3edeba8a403686cdcad4af737b8415493101e7462a70ff73dd0609eafc"}, + {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ee997799a23227e2319a3f8817ce0b058923dbd31904761b788dc8f53bd3e30"}, + {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dda9aa4c33435147262cd2ea87c6b7a1ca83ba9b3933ff7df34e69fee9fced0c"}, + {file = "ujson-5.7.0-cp38-cp38-win32.whl", hash = "sha256:bea8d30e362180aafecabbdcbe0e1f0b32c9fa9e39c38e4af037b9d3ca36f50c"}, + {file = "ujson-5.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:c96e3b872bf883090ddf32cc41957edf819c5336ab0007d0cf3854e61841726d"}, + {file = "ujson-5.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6411aea4c94a8e93c2baac096fbf697af35ba2b2ed410b8b360b3c0957a952d3"}, + {file = "ujson-5.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d3b3499c55911f70d4e074c626acdb79a56f54262c3c83325ffb210fb03e44d"}, + {file = "ujson-5.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341f891d45dd3814d31764626c55d7ab3fd21af61fbc99d070e9c10c1190680b"}, + {file = "ujson-5.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f242eec917bafdc3f73a1021617db85f9958df80f267db69c76d766058f7b19"}, + {file = "ujson-5.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3af9f9f22a67a8c9466a32115d9073c72a33ae627b11de6f592df0ee09b98b6"}, + {file = 
"ujson-5.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a3d794afbf134df3056a813e5c8a935208cddeae975bd4bc0ef7e89c52f0ce0"}, + {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:800bf998e78dae655008dd10b22ca8dc93bdcfcc82f620d754a411592da4bbf2"}, + {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b5ac3d5c5825e30b438ea92845380e812a476d6c2a1872b76026f2e9d8060fc2"}, + {file = "ujson-5.7.0-cp39-cp39-win32.whl", hash = "sha256:cd90027e6d93e8982f7d0d23acf88c896d18deff1903dd96140613389b25c0dd"}, + {file = "ujson-5.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:523ee146cdb2122bbd827f4dcc2a8e66607b3f665186bce9e4f78c9710b6d8ab"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e87cec407ec004cf1b04c0ed7219a68c12860123dfb8902ef880d3d87a71c172"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bab10165db6a7994e67001733f7f2caf3400b3e11538409d8756bc9b1c64f7e8"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b522be14a28e6ac1cf818599aeff1004a28b42df4ed4d7bc819887b9dac915fc"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7592f40175c723c032cdbe9fe5165b3b5903604f774ab0849363386e99e1f253"}, + {file = "ujson-5.7.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ed22f9665327a981f288a4f758a432824dc0314e4195a0eaeb0da56a477da94d"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:adf445a49d9a97a5a4c9bb1d652a1528de09dd1c48b29f79f3d66cea9f826bf6"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64772a53f3c4b6122ed930ae145184ebaed38534c60f3d859d8c3f00911eb122"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35209cb2c13fcb9d76d249286105b4897b75a5e7f0efb0c0f4b90f222ce48910"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90712dfc775b2c7a07d4d8e059dd58636bd6ff1776d79857776152e693bddea6"}, + {file = "ujson-5.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0e4e8981c6e7e9e637e637ad8ffe948a09e5434bc5f52ecbb82b4b4cfc092bfb"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:581c945b811a3d67c27566539bfcb9705ea09cb27c4be0002f7a553c8886b817"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d36a807a24c7d44f71686685ae6fbc8793d784bca1adf4c89f5f780b835b6243"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b4257307e3662aa65e2644a277ca68783c5d51190ed9c49efebdd3cbfd5fa44"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea7423d8a2f9e160c5e011119741682414c5b8dce4ae56590a966316a07a4618"}, + {file = "ujson-5.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c592eb91a5968058a561d358d0fef59099ed152cfb3e1cd14eee51a7a93879e"}, + {file = "ujson-5.7.0.tar.gz", hash = "sha256:e788e5d5dcae8f6118ac9b45d0b891a0d55f7ac480eddcb7f07263f2bcf37b23"}, @@ -5067,2 +5141,2 @@ urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = 
"sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, @@ -5152,79 +5226,98 @@ xxhash = [ - {file = "xxhash-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e9141f224a4dc984ea016744aa40a8a040054ef91933b2f9c81ba18e5b9d06e"}, - {file = "xxhash-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b8810fa72d361262168c2b215e3cee223eb19b74806c08713b943f57f0c91fd6"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb07bb4125c432f500a76a84ef51c0eafc09afbd1479308c6e1e2bbb73a33bb4"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4582e12d9aa25530449a8cad4e9e8e973e0b2f28e77ef6504fc6f216f8f07406"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72b5d4eee98ecd25a2c647f7547a024585400ab13aa7ec837ebb8a25151bbef"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f50cfc3e80fe241d25e557f7ca5a145d3d557bdf738cd2d355bfe1324c28d21"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90d03b46fc06f9ca3c0a4db685df3efffeb880ebcef2ffee707057e09fb8cba2"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:540966f42ccd0d3d09539a7236fbfdce6b15d7be49ee5d5adaef0aa0d020cd1e"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2d65d773c4d9c8d1a88af8e0e2169910cfc3d425006e2eb18cd13a6391543ed1"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3a9e2052ac6520e1f56630ff689b2b85ccd24799d362493435cf46defe163cc1"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:77c6d1e0993e8a314f4a6aec911c12fbb4caf4f58223381d3d41fa153ae6924f"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aa4e22665290f2dfa608901c38b1a5f0d367280fd8adc5996356d7f4993f41f7"}, - {file = "xxhash-3.1.0-cp310-cp310-win32.whl", hash = "sha256:5c65cfb8fd3efd3d574a0cd4abbe59741f720334fa1d79e5366b34b0f4584b66"}, - {file = "xxhash-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:8f32770a4b39ffe6a5785f87b66435b2e4048ba4a36334108ac5d375447ce667"}, - {file = "xxhash-3.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:effd26669365a71e1337977ba789c95029c9cb0ac26e7455255922d3c9ff8fff"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2324c31095708ceb7ee8c15b31bd1bea7376ca477748f9a20aba2e94d01fab1"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1980f79c8ab4536c01048eb6398db0ac2049292150064bef665fa4c89918f86c"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11d4af8d50b8b08835f653a96d58bb3658454144e5e4d28e369f4b3ad2bff4ea"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a7b89e0b8b26cb86369ca0a7395299e0046930664ce96cbc07702504af9a26"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca24dd052adf23e1fd8fb5839d9046328e60222a866fa3c2761e90ddab1fc2b8"}, - {file = 
"xxhash-3.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f7f1b27db4798f7ebe599107c75b2a0648fc1f9d9226fa2771fc296c5593dc7e"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e24bab9aecdfba23c7feb3b173488ca6b3168a50095ff544dedc7caa0c05ac3c"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:cb3032588cd46bc6d16b2b35cd7ff5041fcc90423ae7c8f62263a029ff8f1e5d"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:b9c56f45fd18879212b480dc44dc1da44a22d453e3b4038c4b686f6307124220"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:00f2603b91fbb6fd1c3b83b263a405834e2df393fd2bac6a86e2e0ecc511076c"}, - {file = "xxhash-3.1.0-cp36-cp36m-win32.whl", hash = "sha256:33f865b6eb9126a60345cf3106925a5039ef582b840d2df96f7777a160d0ef17"}, - {file = "xxhash-3.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:95175636d28943eaf3da331aa57c7d02756017880151e11f8476a2ef49dd35de"}, - {file = "xxhash-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b43b55e0d2d81f4ec8caaf165a0a96325d7dd4317770b663701700f9aee855ed"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:694fd42161959382b70ea3442ea017675071dafe8b960849d5a599c4538737d8"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a3d7cd6412a1d0d5e43be188a67f95326e5a8c5d2ae1ad10adf8f896e630091"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ab8f652ffaed3855d25f7666f63bf1ee81ead4d9d30cc9e592571a3959d2964"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2259f13de36b4e675840e50b16bcd5c6f7aec6f5e833af47b3a0186c19e92dd"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e86290486768b51478f320abca9fe61805578298b6e60719ec23bca85c60eec"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d08ebd5313f6811ef76cde0f97179374b047442b918089a09019fed53b9f9cef"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2e798287d6efcd51df12ac67f65ba7d78937be80c2c91bff2d17bf5404c63a24"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:91bf72f009114320d9cbb452d5995286f2c6f70b3f53041f72654c4c1a8b79bd"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d30df80c8bc56aa55f545b1840d84ad6f773a3623b3e1462f17ebbd93c4c69ae"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d4f9c8570e9adecae7d480090bcbf856b1d28f462c30c5cbe9f23b6418d6423"}, - {file = "xxhash-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:0bce4ce403129c6cee69860cf2597d04f29c714797c11e8ec3b2b7b3677c4586"}, - {file = "xxhash-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cf20bca461ae45273599be8635b60b517d2212b51d6d5d85fc8c441078eb02ab"}, - {file = "xxhash-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a9efbc1c780ef3b578486eb250f5e93b2934c918386d981d96b7a06bae90c4d4"}, - {file = "xxhash-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b16e7fc7991118c0b6cd9f5e141911110388f39df58b2996834619d2b956b4a8"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0f1e298f80e302fd1b859e0b27f997eae82e9e9592843a1df2ca79122365ac1"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4645b106732885fc488656657a5790dee4d8ffd123d2134647028f6575f2c05e"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a6176152a64955b3dff89dfb2d3c9a7c93e862cbc37c0858e8e25d1f3f3c"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd65c4850231324af7a613e5647c1c484f3dcbcde4a0e608d099050c684ae79"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b6cd0f781c198b0b53f78124658d0f407fbba7450e79d537505608bf4125ba"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ae86b9672ad4ef431b0e1d284530289382575e2569078071c7adcf5827b4995"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3cb1c759c2a863dd963bdca8504c7ae39388dd1ef189fca91b94f18acb7bde26"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e5d54d8e01c34acf3361bb58c5022a92abc9d5054b919a1d483679d59989bbff"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:362520c908887c85af832e16448bad58cb182d165a16241e3251bdd17bd427be"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6502a60ced52547e9c8e3eba3e5818ac0adca3e9abb5e32d2ee73a060f3dc362"}, - {file = "xxhash-3.1.0-cp38-cp38-win32.whl", hash = "sha256:4143a1ad5f436550fcc091c80e7af23ec31cca1991750391067b24b051dcd0d7"}, - {file = "xxhash-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:04812deabfdba3fa9cae57abb908a3f980bccbe9a4178f3e56afca2f1f625874"}, - {file = "xxhash-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:53e587e6625bc9a7cfa5e067837590a626ff4150000ae31be2af73a67d08ea8c"}, - {file = "xxhash-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:778f3c6007d280b6fff12f95c81d9c4ad6907632a0bfecf23aca18afb54319c0"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc93e4bd34fd14459672345ca4a070b0f28d906bea4b178373b4271498e38ec9"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65a9aac88cc960b3a21a52922d5846f1b15af7a5b937a26c7edee1d3fe80800c"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b74a787be31b8493797d3e7dfac2b240ed443bcd1b42dfbb406629538f103667"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e030031f13455c9bfc10ed58301fbee8fad0e179cc6a1b15e899d71af2958f"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c627840400b90a90d989ebef431b146e89e44377de42cd2e75996bbed394e3c5"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:62cc09f3024ef1a0153e32ac6593025f20fae13b1bc5d08b639891ec110cacec"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:de9758872881d50d0946dfc6067b4782986de6af5ec74c266d47d85c699aa0de"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:216b90f30351fe434903bb039ae88df4c5ae43eb4320a012f6c73bec1d630213"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:84e9d14baccdf31e3c59ed34b9d00df52ad4db376dbbbaad936ea02b9be4a534"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f4089c92b7aac2ead222345b0368e7d69e7a61e7a56762ae2f5e8d67fb67349"}, - {file = "xxhash-3.1.0-cp39-cp39-win32.whl", hash = 
"sha256:dfa73020bc696a46dab2dddd28c76d1abcd0643dc8a2dd06a037392bda5cc5ec"}, - {file = "xxhash-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0602b45447934fd5b81b387e76512a7c7c946b571b3f9a7d7b2cd9d3a09f9041"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a06311c247f2f45619e754249ca6f868c349fbfb63979ce291c83151840952a2"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f109a872aba254ffebe3c749a6b7148463e5d3168ac5afb515f1a929e73feb8f"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4521fed12d111bb2691ca0dc01fa8b36f6c626f53d9ee54befcea957e1b4dbaa"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c60ff7946e46beaa873509c1ca41937b40fc3048620cbd8441bfe03aa053f33"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:379c37f2d46a441bdb71af79443990e21943ef644ffeed5662157a9a682d55be"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:daa1a37685759003680bd2775053bbf772c4f71ad3c729810ea4901535635d5e"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c58b2bc7240966f54db9ef7dcfcc988362b0a315c12ed13a778917457c8dfe9d"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efafa866662f6ab50f1ffb808424ca9373d2f3b4a73e6ea66432dce1779f501c"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea074722fa953a1a3bece979620e2f0b43f2dfca841de84aca32a477c2fdb658"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:369af334d58f6d2f716bd1557d3580c4c1235077090769abf1d54daec2b301a7"}, - {file = "xxhash-3.1.0.tar.gz", hash = "sha256:ac21b1e21dc6fdfee9a57b53f4777539d53a84f2e1546a3f802f159f9966bdc1"}, + {file = "xxhash-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af44b9e59c4b2926a4e3c7f9d29949ff42fcea28637ff6b8182e654461932be8"}, + {file = "xxhash-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1bdd57973e2b802ef32553d7bebf9402dac1557874dbe5c908b499ea917662cd"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b7c9aa77bbce61a5e681bd39cb6a804338474dcc90abe3c543592aa5d6c9a9b"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11bf87dc7bb8c3b0b5e24b7b941a9a19d8c1f88120b6a03a17264086bc8bb023"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2783d41487ce6d379fdfaa7332fca5187bf7010b9bddcf20cafba923bc1dc665"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:561076ca0dcef2fbc20b2bc2765bff099e002e96041ae9dbe910a863ca6ee3ea"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a26eeb4625a6e61cedc8c1b39b89327c9c7e1a8c2c4d786fe3f178eb839ede6"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d93a44d0104d1b9b10de4e7aadf747f6efc1d7ec5ed0aa3f233a720725dd31bd"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:89585adc73395a10306d2e2036e50d6c4ac0cf8dd47edf914c25488871b64f6d"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a892b4b139126a86bfdcb97cd912a2f8c4e8623869c3ef7b50871451dd7afeb0"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e998efb190653f70e0f30d92b39fc645145369a4823bee46af8ddfc244aa969d"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8ed3bd2b8bb3277710843ca63e4f5c3ee6f8f80b083be5b19a7a9905420d11e"}, + {file = "xxhash-3.2.0-cp310-cp310-win32.whl", hash = "sha256:20181cbaed033c72cb881b2a1d13c629cd1228f113046133469c9a48cfcbcd36"}, + {file = "xxhash-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a0f7a16138279d707db778a63264d1d6016ac13ffd3f1e99f54b2855d6c0d8e1"}, + {file = "xxhash-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5daff3fb5bfef30bc5a2cb143810d376d43461445aa17aece7210de52adbe151"}, + {file = "xxhash-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75bb5be3c5de702a547715f320ecf5c8014aeca750ed5147ca75389bd22e7343"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01f36b671ff55cb1d5c2f6058b799b697fd0ae4b4582bba6ed0999678068172a"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4d4519123aac73c93159eb8f61db9682393862dd669e7eae034ecd0a35eadac"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:994e4741d5ed70fc2a335a91ef79343c6b1089d7dfe6e955dd06f8ffe82bede6"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919bc1b010aa6ff0eb918838ff73a435aed9e9a19c3202b91acecd296bf75607"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17b65454c5accbb079c45eca546c27c4782f5175aa320758fafac896b1549d27"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b0c094d5e65a46dbf3fe0928ff20873a747e6abfd2ed4b675beeb2750624bc2e"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f94163ebe2d5546e6a5977e96d83621f4689c1054053428cf8d4c28b10f92f69"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cead7c0307977a00b3f784cff676e72c147adbcada19a2e6fc2ddf54f37cf387"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a0e1bd0260c1da35c1883321ce2707ceea07127816ab625e1226ec95177b561a"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc8878935671490efe9275fb4190a6062b73277bd273237179b9b5a2aa436153"}, + {file = "xxhash-3.2.0-cp311-cp311-win32.whl", hash = "sha256:a433f6162b18d52f7068175d00bd5b1563b7405f926a48d888a97b90a160c40d"}, + {file = "xxhash-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:a32d546a1752e4ee7805d6db57944f7224afa7428d22867006b6486e4195c1f3"}, + {file = "xxhash-3.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:82daaab720866bf690b20b49de5640b0c27e3b8eea2d08aa75bdca2b0f0cfb63"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3126df6520cbdbaddd87ce74794b2b6c45dd2cf6ac2b600a374b8cdb76a2548c"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e172c1ee40507ae3b8d220f4048aaca204f203e1e4197e8e652f5c814f61d1aa"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5384f1d9f30876f5d5b618464fb19ff7ce6c0fe4c690fbaafd1c52adc3aae807"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:26cb52174a7e96a17acad27a3ca65b24713610ac479c99ac9640843822d3bebf"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbcd613a5e76b1495fc24db9c37a6b7ee5f214fd85979187ec4e032abfc12ded"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f988daf25f31726d5b9d0be6af636ca9000898f9ea43a57eac594daea25b0948"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bbc30c98ab006ab9fc47e5ed439c00f706bc9d4441ff52693b8b6fea335163e0"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:2408d49260b0a4a7cc6ba445aebf38e073aeaf482f8e32767ca477e32ccbbf9e"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:3f4152fd0bf8b03b79f2f900fd6087a66866537e94b5a11fd0fd99ef7efe5c42"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0eea848758e4823a01abdbcccb021a03c1ee4100411cbeeb7a5c36a202a0c13c"}, + {file = "xxhash-3.2.0-cp36-cp36m-win32.whl", hash = "sha256:77709139af5123c578ab06cf999429cdb9ab211047acd0c787e098dcb3f1cb4d"}, + {file = "xxhash-3.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:91687671fd9d484a4e201ad266d366b695a45a1f2b41be93d116ba60f1b8f3b3"}, + {file = "xxhash-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e4af8bc5c3fcc2192c266421c6aa2daab1a18e002cb8e66ef672030e46ae25cf"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8be562e2ce3e481d9209b6f254c3d7c5ff920eb256aba2380d2fb5ba75d4f87"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9eba0c7c12126b12f7fcbea5513f28c950d28f33d2a227f74b50b77789e478e8"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2198c4901a0223c48f6ec0a978b60bca4f4f7229a11ca4dc96ca325dd6a29115"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50ce82a71b22a3069c02e914bf842118a53065e2ec1c6fb54786e03608ab89cc"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5019fb33711c30e54e4e57ae0ca70af9d35b589d385ac04acd6954452fa73bb"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d54ac023eef7e3ac9f0b8841ae8a376b933043bc2ad428121346c6fa61c491c"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c55fa832fc3fe64e0d29da5dc9b50ba66ca93312107cec2709300ea3d3bab5c7"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4ce006215497993ae77c612c1883ca4f3973899573ce0c52fee91f0d39c4561"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1afb9b9d27fd675b436cb110c15979976d92d761ad6e66799b83756402f3a974"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:baa99cebf95c1885db21e119395f222a706a2bb75a545f0672880a442137725e"}, + {file = "xxhash-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:75aa692936942ccb2e8fd6a386c81c61630ac1b6d6e921698122db8a930579c3"}, + {file = "xxhash-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0a2cdfb5cae9fafb9f7b65fd52ecd60cf7d72c13bb2591ea59aaefa03d5a8827"}, + {file = "xxhash-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a68d1e8a390b660d94b9360ae5baa8c21a101bd9c4790a8b30781bada9f1fc6"}, + {file = "xxhash-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ce7c3ce28f94302df95eaea7c9c1e2c974b6d15d78a0c82142a97939d7b6c082"}, + {file = 
"xxhash-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dcb419bf7b0bc77d366e5005c25682249c5521a63fd36c51f584bd91bb13bd5"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae521ed9287f86aac979eeac43af762f03d9d9797b2272185fb9ddd810391216"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d16775094423088ffa357d09fbbb9ab48d2fb721d42c0856b801c86f616eec"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe454aeab348c42f56d6f7434ff758a3ef90787ac81b9ad5a363cd61b90a1b0b"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052fd0efdd5525c2dbc61bebb423d92aa619c4905bba605afbf1e985a562a231"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:02badf3754e2133de254a4688798c4d80f0060635087abcb461415cb3eb82115"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:66b8a90b28c13c2aae7a71b32638ceb14cefc2a1c8cf23d8d50dfb64dfac7aaf"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:649cdf19df175925ad87289ead6f760cd840730ee85abc5eb43be326a0a24d97"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4b948a03f89f5c72d69d40975af8af241111f0643228796558dc1cae8f5560b0"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49f51fab7b762da7c2cee0a3d575184d3b9be5e2f64f26cae2dd286258ac9b3c"}, + {file = "xxhash-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1a42994f0d42b55514785356722d9031f064fd34e495b3a589e96db68ee0179d"}, + {file = "xxhash-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0a6d58ba5865475e53d6c2c4fa6a62e2721e7875e146e2681e5337a6948f12e7"}, + {file = "xxhash-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aabdbc082030f8df613e2d2ea1f974e7ad36a539bdfc40d36f34e55c7e4b8e94"}, + {file = "xxhash-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:498843b66b9ca416e9d03037e5875c8d0c0ab9037527e22df3b39aa5163214cd"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a910b1193cd90af17228f5d6069816646df0148f14f53eefa6b2b11a1dedfcd0"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb6d8ce31dc25faf4da92991320e211fa7f42de010ef51937b1dc565a4926501"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:883dc3d3942620f4c7dbc3fd6162f50a67f050b714e47da77444e3bcea7d91cc"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59dc8bfacf89b8f5be54d55bc3b4bd6d74d0c5320c8a63d2538ac7df5b96f1d5"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61e6aa1d30c2af692aa88c4dd48709426e8b37bff6a574ee2de677579c34a3d6"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:314ec0bd21f0ee8d30f2bd82ed3759314bd317ddbbd8555668f3d20ab7a8899a"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dad638cde3a5357ad3163b80b3127df61fb5b5e34e9e05a87697144400ba03c7"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:eaa3ea15025b56076d806b248948612289b093e8dcda8d013776b3848dffff15"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7deae3a312feb5c17c97cbf18129f83cbd3f1f9ec25b0f50e2bd9697befb22e7"}, 
+ {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:add774341c09853b1612c64a526032d95ab1683053325403e1afbe3ad2f374c5"}, + {file = "xxhash-3.2.0-cp39-cp39-win32.whl", hash = "sha256:9b94749130ef3119375c599bfce82142c2500ef9ed3280089157ee37662a7137"}, + {file = "xxhash-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e57d94a1552af67f67b27db5dba0b03783ea69d5ca2af2f40e098f0ba3ce3f5f"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92fd765591c83e5c5f409b33eac1d3266c03d3d11c71a7dbade36d5cdee4fbc0"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8970f6a411a9839a02b23b7e90bbbba4a6de52ace009274998566dc43f36ca18"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5f3e33fe6cbab481727f9aeb136a213aed7e33cd1ca27bd75e916ffacc18411"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:368265392cb696dd53907e2328b5a8c1bee81cf2142d0cc743caf1c1047abb36"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3b1f3c6d67fa9f49c4ff6b25ce0e7143bab88a5bc0f4116dd290c92337d0ecc7"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c5e8db6e1ee7267b7c412ad0afd5863bf7a95286b8333a5958c8097c69f94cf5"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:761df3c7e2c5270088b691c5a8121004f84318177da1ca1db64222ec83c44871"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2d15a707e7f689531eb4134eccb0f8bf3844bb8255ad50823aa39708d9e6755"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6b2ba4ff53dd5f57d728095e3def7375eb19c90621ce3b41b256de84ec61cfd"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:61b0bcf946fdfd8ab5f09179dc2b5c74d1ef47cedfc6ed0ec01fdf0ee8682dd3"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f7b79f0f302396d8e0d444826ceb3d07b61977793886ebae04e82796c02e42dc"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0773cd5c438ffcd5dbff91cdd503574f88a4b960e70cedeb67736583a17a918"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ec1f57127879b419a2c8d2db9d9978eb26c61ae17e5972197830430ae78d25b"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d4b15c00e807b1d3d0b612338c814739dec310b80fb069bd732b98ddc709ad7"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9d3f686e3d1c8900c5459eee02b60c7399e20ec5c6402364068a343c83a61d90"}, + {file = "xxhash-3.2.0.tar.gz", hash = "sha256:1afd47af8955c5db730f630ad53ae798cf7fae0acb64cebb3cf94d35c47dd088"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 0b296faa..5f198bac 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.4-py3-none-any.whl", develop = false } @@ -34 +34 @@ tfrecord = "^1.14.1" -torchaudio = "^0.10.1" 
+torchaudio = "^0.13.1"
diff --git a/workers/datasets_based/src/datasets_based/config.py b/workers/datasets_based/src/datasets_based/config.py
index 2e0a9970..21151bca 100644
--- a/workers/datasets_based/src/datasets_based/config.py
+++ b/workers/datasets_based/src/datasets_based/config.py
@@ -76,6 +76,6 @@ class FirstRowsConfig:
-PARQUET_COMMIT_MESSAGE = "Update parquet files"
-PARQUET_COMMITTER_HF_TOKEN = None
-PARQUET_MAX_DATASET_SIZE = 100_000_000
-PARQUET_SOURCE_REVISION = "main"
-PARQUET_TARGET_REVISION = "refs/convert/parquet"
-PARQUET_URL_TEMPLATE = "/datasets/%s/resolve/%s/%s"
+PARQUET_AND_DATASET_INFO_COMMIT_MESSAGE = "Update parquet files"
+PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN = None
+PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE = 100_000_000
+PARQUET_AND_DATASET_INFO_SOURCE_REVISION = "main"
+PARQUET_AND_DATASET_INFO_TARGET_REVISION = "refs/convert/parquet"
+PARQUET_AND_DATASET_INFO_URL_TEMPLATE = "/datasets/%s/resolve/%s/%s"
@@ -89 +89 @@ def get_empty_str_list() -> List[str]:
-class ParquetConfig:
+class ParquetAndDatasetInfoConfig:
@@ -92,6 +92,6 @@ class ParquetConfig:
-    commit_message: str = PARQUET_COMMIT_MESSAGE
-    committer_hf_token: Optional[str] = PARQUET_COMMITTER_HF_TOKEN
-    max_dataset_size: int = PARQUET_MAX_DATASET_SIZE
-    source_revision: str = PARQUET_SOURCE_REVISION
-    target_revision: str = PARQUET_TARGET_REVISION
-    url_template: str = PARQUET_URL_TEMPLATE
+    commit_message: str = PARQUET_AND_DATASET_INFO_COMMIT_MESSAGE
+    committer_hf_token: Optional[str] = PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN
+    max_dataset_size: int = PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE
+    source_revision: str = PARQUET_AND_DATASET_INFO_SOURCE_REVISION
+    target_revision: str = PARQUET_AND_DATASET_INFO_TARGET_REVISION
+    url_template: str = PARQUET_AND_DATASET_INFO_URL_TEMPLATE
@@ -100 +100 @@ class ParquetConfig:
-    def from_env() -> "ParquetConfig":
+    def from_env() -> "ParquetAndDatasetInfoConfig":
@@ -102,2 +102,2 @@ class ParquetConfig:
-        with env.prefixed("PARQUET_"):
-            return ParquetConfig(
+        with env.prefixed("PARQUET_AND_DATASET_INFO_"):
+            return ParquetAndDatasetInfoConfig(
@@ -106,6 +106,8 @@ class ParquetConfig:
-                commit_message=env.str(name="COMMIT_MESSAGE", default=PARQUET_COMMIT_MESSAGE),
-                committer_hf_token=env.str(name="COMMITTER_HF_TOKEN", default=PARQUET_COMMITTER_HF_TOKEN),
-                max_dataset_size=env.int(name="MAX_DATASET_SIZE", default=PARQUET_MAX_DATASET_SIZE),
-                source_revision=env.str(name="SOURCE_REVISION", default=PARQUET_SOURCE_REVISION),
-                target_revision=env.str(name="TARGET_REVISION", default=PARQUET_TARGET_REVISION),
-                url_template=env.str(name="URL_TEMPLATE", default=PARQUET_URL_TEMPLATE),
+                commit_message=env.str(name="COMMIT_MESSAGE", default=PARQUET_AND_DATASET_INFO_COMMIT_MESSAGE),
+                committer_hf_token=env.str(
+                    name="COMMITTER_HF_TOKEN", default=PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN
+                ),
+                max_dataset_size=env.int(name="MAX_DATASET_SIZE", default=PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE),
+                source_revision=env.str(name="SOURCE_REVISION", default=PARQUET_AND_DATASET_INFO_SOURCE_REVISION),
+                target_revision=env.str(name="TARGET_REVISION", default=PARQUET_AND_DATASET_INFO_TARGET_REVISION),
+                url_template=env.str(name="URL_TEMPLATE", default=PARQUET_AND_DATASET_INFO_URL_TEMPLATE),
diff --git a/workers/datasets_based/src/datasets_based/main.py b/workers/datasets_based/src/datasets_based/main.py
index 219f505e..7c91470f 100644
--- a/workers/datasets_based/src/datasets_based/main.py
+++ b/workers/datasets_based/src/datasets_based/main.py
@@ -5 +4,0 @@ from libcommon.queue import Queue
-from libcommon.worker_loop import WorkerLoop
@@ -8,0 +8 @@ from datasets_based.worker_factory import DatasetBasedWorkerFactory
+from datasets_based.worker_loop import WorkerLoop
diff --git a/libs/libcommon/src/libcommon/worker.py b/workers/datasets_based/src/datasets_based/worker.py
similarity index 96%
rename from libs/libcommon/src/libcommon/worker.py
rename to workers/datasets_based/src/datasets_based/worker.py
index 1083c047..f43511ea 100644
--- a/libs/libcommon/src/libcommon/worker.py
+++ b/workers/datasets_based/src/datasets_based/worker.py
@@ -9,2 +8,0 @@ from typing import Any, Literal, Mapping, Optional
-from packaging import version
-
@@ -23,8 +21 @@ from libcommon.simple_cache import (
-
-
-def parse_version(string_version: str) -> version.Version:
-    parsed_version = version.parse(string_version)
-    if isinstance(parsed_version, version.LegacyVersion):
-        raise ValueError(f"LegacyVersion is not supported: {parsed_version}")
-    return parsed_version
-
+from packaging import version
@@ -218 +209 @@ class Worker(ABC):
-        return parse_version(self.get_version()).major - parse_version(other_version).major
+        return version.parse(self.get_version()).major - version.parse(other_version).major
@@ -271 +261,0 @@ class Worker(ABC):
-            self.create_children_jobs(self.get_new_splits(content))
@@ -274,0 +265 @@ class Worker(ABC):
+            self.create_children_jobs(self.get_new_splits(content))
diff --git a/workers/datasets_based/src/datasets_based/worker_factory.py b/workers/datasets_based/src/datasets_based/worker_factory.py
index a032f9a7..4924b6a3 100644
--- a/workers/datasets_based/src/datasets_based/worker_factory.py
+++ b/workers/datasets_based/src/datasets_based/worker_factory.py
@@ -4,3 +4,7 @@
-from libcommon.worker import JobInfo, Worker, WorkerFactory
-
-from datasets_based.config import AppConfig, FirstRowsConfig, ParquetConfig
+from datasets_based.config import (
+    AppConfig,
+    FirstRowsConfig,
+    ParquetAndDatasetInfoConfig,
+)
+from datasets_based.worker import JobInfo, Worker, WorkerFactory
+from datasets_based.workers.dataset_info import DatasetInfoWorker
@@ -8,0 +13 @@ from datasets_based.workers.parquet import ParquetWorker
+from datasets_based.workers.parquet_and_dataset_info import ParquetAndDatasetInfoWorker
@@ -24,3 +29,5 @@ class DatasetBasedWorkerFactory(WorkerFactory):
-        elif job_type == ParquetWorker.get_job_type():
-            return ParquetWorker(
-                job_info=job_info, app_config=self.app_config, parquet_config=ParquetConfig.from_env()
+        elif job_type == ParquetAndDatasetInfoWorker.get_job_type():
+            return ParquetAndDatasetInfoWorker(
+                job_info=job_info,
+                app_config=self.app_config,
+                parquet_and_dataset_info_config=ParquetAndDatasetInfoConfig.from_env(),
@@ -27,0 +35,4 @@ class DatasetBasedWorkerFactory(WorkerFactory):
+        elif job_type == ParquetWorker.get_job_type():
+            return ParquetWorker(job_info=job_info, app_config=self.app_config)
+        elif job_type == DatasetInfoWorker.get_job_type():
+            return DatasetInfoWorker(job_info=job_info, app_config=self.app_config)
@@ -31,0 +43 @@ class DatasetBasedWorkerFactory(WorkerFactory):
+            ParquetAndDatasetInfoWorker.get_job_type(),
@@ -32,0 +45 @@ class DatasetBasedWorkerFactory(WorkerFactory):
+            DatasetInfoWorker.get_job_type(),
diff --git a/libs/libcommon/src/libcommon/worker_loop.py b/workers/datasets_based/src/datasets_based/worker_loop.py
similarity index 98%
rename from libs/libcommon/src/libcommon/worker_loop.py
rename to workers/datasets_based/src/datasets_based/worker_loop.py
index d5e1e445..0a225a73 100644
100644 --- a/libs/libcommon/src/libcommon/worker_loop.py +++ b/workers/datasets_based/src/datasets_based/worker_loop.py @@ -9,2 +8,0 @@ from dataclasses import dataclass -from psutil import cpu_count, disk_usage, getloadavg, swap_memory, virtual_memory - @@ -13 +11,3 @@ from libcommon.queue import EmptyQueueError, Queue -from libcommon.worker import WorkerFactory +from psutil import cpu_count, disk_usage, getloadavg, swap_memory, virtual_memory + +from datasets_based.worker import WorkerFactory diff --git a/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py index 31996058..8e060272 100644 --- a/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py +++ b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py @@ -14 +13,0 @@ from libcommon.storage import init_dir, remove_dir -from libcommon.worker import JobInfo, Worker @@ -16,0 +16 @@ from datasets_based.config import AppConfig, DatasetsBasedConfig +from datasets_based.worker import JobInfo, Worker diff --git a/workers/datasets_based/src/datasets_based/workers/dataset_info.py b/workers/datasets_based/src/datasets_based/workers/dataset_info.py new file mode 100644 index 00000000..9a13fcc1 --- /dev/null +++ b/workers/datasets_based/src/datasets_based/workers/dataset_info.py @@ -0,0 +1,117 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +from http import HTTPStatus +from typing import Any, Literal, Mapping, Optional, TypedDict + +from libcommon.dataset import DatasetNotFoundError +from libcommon.exceptions import CustomError +from libcommon.simple_cache import DoesNotExist, SplitFullName, get_response + +from datasets_based.config import AppConfig +from datasets_based.worker import JobInfo, Worker + +DatasetInfoWorkerErrorCode = Literal[ + "PreviousStepStatusError", + "PreviousStepFormatError", +] + + +class DatasetInfoResponse(TypedDict): + dataset_info: dict[str, Any] + + +class DatasetInfoWorkerError(CustomError): + """Base class for exceptions in this module.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: DatasetInfoWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__(message, status_code, str(code), cause, disclose_cause) + + +class PreviousStepStatusError(DatasetInfoWorkerError): + """Raised when the previous step gave an error. The job should not have been created.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "PreviousStepStatusError", cause, False) + + +class PreviousStepFormatError(DatasetInfoWorkerError): + """Raised when the content of the previous step does not have the expected format.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "PreviousStepFormatError", cause, False) + + +def compute_dataset_info_response(dataset: str) -> DatasetInfoResponse: + """ + Get the response of /dataset-info for one specific dataset on huggingface.co. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + Returns: + `DatasetInfoResponse`: An object with the dataset_info (the dataset info of each config).
+ <Tip> + Raises the following errors: + - [`~dataset_info.worker.PreviousStepStatusError`] + If the previous step gave an error. + - [`~dataset_info.worker.PreviousStepFormatError`] + If the content of the previous step does not have the expected format + </Tip> + """ + logging.info(f"get dataset_info for dataset={dataset}") + + try: + response = get_response(kind="/parquet-and-dataset-info", dataset=dataset) + except DoesNotExist as e: + raise DatasetNotFoundError("No response found in previous step for this dataset.", e) from e + if response["http_status"] != HTTPStatus.OK: + raise PreviousStepStatusError( + f"Previous step gave an error: {response['http_status']}. This job should not have been created." + ) + content = response["content"] + if "dataset_info" not in content: + raise PreviousStepFormatError("Previous step did not return the expected content.") + return { + "dataset_info": content["dataset_info"], + } + + +class DatasetInfoWorker(Worker): + @staticmethod + def get_job_type() -> str: + return "/dataset-info" + + @staticmethod + def get_version() -> str: + return "2.0.0" + + def __init__(self, job_info: JobInfo, app_config: AppConfig) -> None: + job_type = job_info["type"] + try: + processing_step = app_config.processing_graph.graph.get_step_by_job_type(job_type) + except ValueError as e: + raise ValueError( + f"Unsupported job type: '{job_type}'. The job types declared in the processing graph are:" + f" {[step.job_type for step in app_config.processing_graph.graph.steps.values()]}" + ) from e + super().__init__(job_info=job_info, common_config=app_config.common, processing_step=processing_step) + + def compute(self) -> Mapping[str, Any]: + return compute_dataset_info_response(dataset=self.dataset) + + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + """Get the set of new splits, from the content created by the compute.""" + return { + SplitFullName(dataset=self.dataset, config=config, split=split) + for config in content["dataset_info"].keys() + for split in content["dataset_info"][config]["splits"].keys() + } diff --git a/workers/datasets_based/src/datasets_based/workers/first_rows.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py index 170f6e64..752ee9c8 100644 --- a/workers/datasets_based/src/datasets_based/workers/first_rows.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -24 +23,0 @@ from libcommon.utils import orjson_dumps -from libcommon.worker import ConfigNotFoundError, JobInfo, SplitNotFoundError @@ -27,0 +27 @@ from datasets_based.features import get_cell_value +from datasets_based.worker import ConfigNotFoundError, JobInfo, SplitNotFoundError diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index e577d712..df846dfc 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -4,2 +3,0 @@ -import contextlib -import glob @@ -7 +4,0 @@ import logging -import re @@ -9,3 +6 @@ from http import HTTPStatus -from pathlib import Path -from typing import Any, List, Literal, Mapping, Optional, Tuple, TypedDict -from urllib.parse import quote +from typing import Any, List, Literal, Mapping, Optional, TypedDict @@ -13,13 +8 @@ from urllib.parse import quote -import datasets.config -from datasets import get_dataset_config_names, get_dataset_infos, load_dataset_builder -from datasets.data_files import EmptyDatasetError as
_EmptyDatasetError -from huggingface_hub.hf_api import ( - CommitOperation, - CommitOperationAdd, - CommitOperationDelete, - DatasetInfo, - HfApi, - RepoFile, -) -from huggingface_hub.utils import RepositoryNotFoundError, RevisionNotFoundError -from libcommon.dataset import ask_access +from libcommon.dataset import DatasetNotFoundError @@ -27,2 +10 @@ from libcommon.exceptions import CustomError -from libcommon.simple_cache import SplitFullName -from libcommon.worker import DatasetNotFoundError, JobInfo +from libcommon.simple_cache import DoesNotExist, SplitFullName, get_response @@ -30,2 +12,3 @@ from libcommon.worker import DatasetNotFoundError, JobInfo -from datasets_based.config import AppConfig, ParquetConfig -from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker +from datasets_based.config import AppConfig +from datasets_based.worker import JobInfo, Worker +from datasets_based.workers.parquet_and_dataset_info import ParquetFileItem @@ -34,6 +17,2 @@ ParquetWorkerErrorCode = Literal[ - "DatasetRevisionNotFoundError", - "EmptyDatasetError", - "ConfigNamesError", - "DatasetInBlockListError", - "DatasetTooBigFromHubError", - "DatasetTooBigFromDatasetsError", + "PreviousStepStatusError", + "PreviousStepFormatError", @@ -42,0 +22,4 @@ ParquetWorkerErrorCode = Literal[ +class ParquetResponse(TypedDict): + parquet_files: List[ParquetFileItem] + + @@ -57,23 +40,2 @@ class ParquetWorkerError(CustomError): -class DatasetRevisionNotFoundError(ParquetWorkerError): - """Raised when the revision of a dataset repository does not exist.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.NOT_FOUND, "DatasetRevisionNotFoundError", cause, False) - - -class ConfigNamesError(ParquetWorkerError): - """Raised when the configuration names could not be fetched.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "ConfigNamesError", cause, True) - - -class EmptyDatasetError(ParquetWorkerError): - """Raised when the dataset has no data.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) - - -class DatasetInBlockListError(ParquetWorkerError): - """Raised when the dataset is in the list of blocked datasets.""" +class PreviousStepStatusError(ParquetWorkerError): + """Raised when the previous step gave an error. 
The job should not have been created.""" @@ -82 +44 @@ class DatasetInBlockListError(ParquetWorkerError): - super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetInBlockListError", cause, False) + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "PreviousStepStatusError", cause, False) @@ -85,2 +47,2 @@ class DatasetInBlockListError(ParquetWorkerError): -class DatasetTooBigFromHubError(ParquetWorkerError): - """Raised when the dataset size (sum of files on the Hub) is too big.""" +class PreviousStepFormatError(ParquetWorkerError): + """Raised when the content of the previous step does not have the expected format.""" @@ -89,36 +51 @@ class DatasetTooBigFromHubError(ParquetWorkerError): - super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetTooBigFromHubError", cause, False) - - -class DatasetTooBigFromDatasetsError(ParquetWorkerError): - """Raised when the dataset size (sum of config sizes given by the datasets library) is too big.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetTooBigFromDatasetsError", cause, False) - - -class ParquetFileItem(TypedDict): - dataset: str - config: str - split: str - url: str - filename: str - size: int - - -class ParquetResponse(TypedDict): - parquet_files: List[ParquetFileItem] - - -DATASET_TYPE = "dataset" - - -class ParquetFile: - def __init__(self, local_file: str, local_dir: str, config: str): - if not local_file.startswith(local_dir): - raise ValueError(f"{local_file} is not in {local_dir}") - self.local_file = local_file - self.local_dir = local_dir - self.config = config - - def repo_file(self) -> str: - return f'{self.config}/{self.local_file.removeprefix(f"{self.local_dir}/")}' + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "PreviousStepFormatError", cause, False) @@ -127,287 +54 @@ class ParquetFile:
- blocked_datasets (`List[str]`): - The list of blocked datasets. If empty, no dataset is blocked. - Returns: - `None` - <Tip> - Raises the following errors: - - [`~parquet.worker.DatasetInBlockListError`] - If the dataset is in the list of blocked datasets. - </Tip> - """ - if dataset in blocked_datasets: - raise DatasetInBlockListError( - "The parquet conversion has been disabled for this dataset for now. Please open an issue in" - " https://github.com/huggingface/datasets-server if you want this dataset to be supported." - ) - - -def get_dataset_info_or_raise( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str], - revision: str, -) -> DatasetInfo: - """ - Return the dataset info if possible. - Raise an error if the dataset cannot be accessed (does not exist, gated with extra fields, private) - - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, `optional`): - An app authentication token with read access to all the datasets. - revision (`str`): - The git revision (e.g. "main" or sha) of the dataset - Returns: - `DatasetInfo`: The dataset info - <Tip> - Raises the following errors: - - [`~libcommon.worker.DatasetNotFoundError`] - If the repository to download from cannot be found. This may be because it doesn't exist, - or because it is set to `private` and you do not have access. - - [`~parquet.worker.DatasetRevisionNotFoundError`] - If the revision does not exist or cannot be accessed using the token. - </Tip> - """ - try: - dataset_info = HfApi(endpoint=hf_endpoint, token=hf_token).dataset_info( - repo_id=dataset, revision=revision, files_metadata=True - ) - except RepositoryNotFoundError as err: - raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err - except RevisionNotFoundError as err: - raise DatasetRevisionNotFoundError("The dataset revision does not exist on the Hub.") from err - return dataset_info - - -def raise_if_too_big_from_hub( - dataset_info: DatasetInfo, - max_dataset_size: int, -) -> None: - """ - Raise an error if the dataset is too big to be converted to parquet - - Args: - dataset_info (`DatasetInfo`): - The dataset info - max_dataset_size (`int`): - The maximum size of the dataset in bytes - Returns: - `None` - <Tip> - Raises the following errors: - - [`~parquet.worker.DatasetTooBigFromHubError`] - If the dataset is too big to be converted to parquet - </Tip> - """ - dataset_size: int = sum(sibling.size for sibling in dataset_info.siblings if sibling.size is not None) - if dataset_size > max_dataset_size: - raise DatasetTooBigFromHubError( - f"The conversion to parquet is limited to datasets under {max_dataset_size} bytes. " - f"Current size of files on the hub is {dataset_size} bytes." - ) - - -def raise_if_too_big_from_datasets( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str], - revision: str, - max_dataset_size: int, -) -> None: - """ - Raise an error if the dataset is too big to be converted to parquet, as measured by the sum of the configs - sizes given by the datasets library - - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, `optional`): - An app authentication token with read access to all the datasets. - revision (`str`): - The git revision (e.g. 
"main" or sha) of the dataset - max_dataset_size (`int`): - The maximum size of the dataset in bytes - Returns: - `None` - <Tip> - Raises the following errors: - - [`ValueError`] - If the datasets.config.HF_ENDPOINT is not set to the expected value - - [`~parquet.worker.DatasetTooBigFromDatasetsError`] - If the dataset is too big to be converted to parquet - </Tip> - """ - if datasets.config.HF_ENDPOINT != hf_endpoint: - raise ValueError( - "datasets.config.HF_ENDPOINT should have already been set to {hf_endpoint}. " - f"Current value: {datasets.config.HF_ENDPOINT}. " - ) - dataset_size = 0 - with contextlib.suppress(Exception): - infos = get_dataset_infos(path=dataset, revision=revision, use_auth_token=hf_token) - dataset_size = sum(value.dataset_size for value in infos.values() if value.dataset_size is not None) - if dataset_size > max_dataset_size: - raise DatasetTooBigFromDatasetsError( - f"The conversion to parquet is limited to datasets under {max_dataset_size} bytes. " - f"Current size as given per the datasets library is {dataset_size} bytes." - ) - - -def raise_if_not_supported( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str], - committer_hf_token: Optional[str], - revision: str, - supported_datasets: List[str], - blocked_datasets: List[str], - max_dataset_size: int, -) -> None: - """ - Raise an error if the dataset is not supported: - - if the dataset is in the list of blocked datasets - - if the dataset cannot be accessed (does not exist, gated with extra fields, private) - - if the dataset is too big, and not in the list of supported datasets - - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, `optional`): - An app authentication token with read access to all the datasets. - committer_hf_token (`str`, `optional`): - A user authentication token (See https://huggingface.co/settings/token) with write access. It must: - - be part of the `huggingface` organization (to create the ref/convert/parquet "branch") - - be part of the `datasets-maintainers` organization (to push to the ref/convert/parquet "branch") - revision (`str`): - The git revision (e.g. "main" or sha) of the dataset - supported_datasets (`List[str]`): - The list of supported datasets, saving the blocked datasets. If empty, all datasets are supported - (saving the blocked datasets). - blocked_datasets (`List[str]`): - The list of blocked datasets. If empty, no dataset is blocked. - max_dataset_size (`int`): - The maximum size of a dataset in bytes. If the dataset is under the limit (which means that the size - can be fetched), it will be allowed. - Returns: - `ParquetResponseResult`: An object with the parquet_response - (dataset and list of parquet files) and the dataset_git_revision (sha) if any. - <Tip> - Raises the following errors: - - [`~parquet.worker.DatasetInBlockListError`] - If the dataset is in the list of blocked datasets. - - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. - Programmatic access is not implemented for this type of dataset because there is no easy - way to get the list of extra fields. - - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. 
- - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the - token does not give the sufficient access to the dataset, or if the dataset is private - (private datasets are not supported by the datasets server) - - ['~requests.exceptions.HTTPError']: any other error when asking access - - [`~parquet.worker.DatasetRevisionNotFoundError`] - If the revision does not exist or cannot be accessed using the token. - - [`~parquet.worker.DatasetTooBigFromHubError`] - If the dataset is too big to be converted to parquet - - [`ValueError`] - If the datasets.config.HF_ENDPOINT is not set to the expected value - - [`~parquet.worker.DatasetTooBigFromDatasetsError`] - If the dataset is too big to be converted to parquet - </Tip> - """ - raise_if_blocked(dataset=dataset, blocked_datasets=blocked_datasets) - ask_access(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=committer_hf_token) - dataset_info = get_dataset_info_or_raise( - dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, revision=revision - ) - if dataset in supported_datasets: - return - raise_if_too_big_from_datasets( - dataset=dataset, - hf_endpoint=hf_endpoint, - hf_token=hf_token, - revision=revision, - max_dataset_size=max_dataset_size, - ) - raise_if_too_big_from_hub(dataset_info=dataset_info, max_dataset_size=max_dataset_size) - - -def compute_parquet_response( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str], - committer_hf_token: Optional[str], - source_revision: str, - target_revision: str, - commit_message: str, - url_template: str, - supported_datasets: List[str], - blocked_datasets: List[str], - max_dataset_size: int, -) -> ParquetResponse: +def compute_parquet_response(dataset: str) -> ParquetResponse: @@ -416 +56,0 @@ def compute_parquet_response( - It is assumed that the dataset can be accessed with the token. @@ -421,24 +60,0 @@ def compute_parquet_response( - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, `optional`): - An app authentication token with read access to all the datasets. - committer_hf_token (`str`, `optional`): - A user authentication token (See https://huggingface.co/settings/token) with write access. It must: - - be part of the `huggingface` organization (to create the ref/convert/parquet "branch") - - be part of the `datasets-maintainers` organization (to push to the ref/convert/parquet "branch") - source_revision (`str`): - The git revision (e.g. "main" or sha) of the dataset used to prepare the parquet files - target_revision (`str`): - The target git revision (e.g. "ref/convert/parquet") of the dataset where to store the parquet files - commit_message (`str`): - The commit message to use when storing the parquet files - url_template (`str`): - The template to use to build the parquet file url - supported_datasets (`List[str]`): - The list of supported datasets, saving the blocked datasets. If empty, all datasets are supported - (saving the blocked datasets). - blocked_datasets (`List[str]`): - The list of blocked datasets. If empty, no dataset is blocked. - max_dataset_size (`int`): - The maximum size of a dataset in bytes. If the dataset is under the limit (which means that the size - can be fetched), it will be allowed. @@ -446,2 +62 @@ def compute_parquet_response( - `ParquetResponseResult`: An object with the parquet_response - (dataset and list of parquet files) and the dataset_git_revision (sha) if any. + `ParquetResponse`: An object with the parquet_response (list of parquet files). 
@@ -450,24 +65,4 @@ def compute_parquet_response( - - [`~parquet.worker.DatasetInBlockListError`] - If the dataset is in the list of blocked datasets. - - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. - Programmatic access is not implemented for this type of dataset because there is no easy - way to get the list of extra fields. - - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. - - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the - token does not give the sufficient access to the dataset, or if the dataset is private - (private datasets are not supported by the datasets server) - - ['~requests.exceptions.HTTPError']: any other error when asking access - - [`~parquet.worker.DatasetRevisionNotFoundError`] - If the revision does not exist or cannot be accessed using the token. - - [`~parquet.worker.DatasetTooBigFromHubError`] - If the dataset is too big to be converted to parquet - - [`ValueError`] - If the datasets.config.HF_ENDPOINT is not set to the expected value - - [`~parquet.worker.DatasetTooBigFromDatasetsError`] - If the dataset is too big to be converted to parquet - - [`~parquet.worker.EmptyDatasetError`] - The dataset is empty. - - [`~parquet.worker.ConfigNamesError`] - If the list of configurations could not be obtained using the datasets library. - - [`~parquet.worker.DatasetInBlockListError`] - If the dataset is in the list of blocked datasets. + - [`~parquet.worker.PreviousStepStatusError`] + If the previous step gave an error. + - [`~parquet.worker.PreviousStepFormatError`] + If the content of the previous step does not have the expected format @@ -476,12 +71 @@ def compute_parquet_response( - logging.info(f"get splits for dataset={dataset}") - - raise_if_not_supported( - dataset=dataset, - hf_endpoint=hf_endpoint, - hf_token=hf_token, - committer_hf_token=committer_hf_token, - revision=source_revision, - supported_datasets=supported_datasets, - blocked_datasets=blocked_datasets, - max_dataset_size=max_dataset_size, - ) + logging.info(f"get parquet files for dataset={dataset}") @@ -489,4 +73,2 @@ def compute_parquet_response( - hf_api = HfApi(endpoint=hf_endpoint, token=hf_token) - committer_hf_api = HfApi(endpoint=hf_endpoint, token=committer_hf_token) - - # get the sorted list of configurations + # TODO: we should move this dependency to the Worker class: defining which are the inputs, and just getting their + # value here @@ -494,16 +76,6 @@ def compute_parquet_response( - config_names = sorted( - get_dataset_config_names(path=dataset, revision=source_revision, use_auth_token=hf_token) - ) - except _EmptyDatasetError as err: - raise EmptyDatasetError("The dataset is empty.", cause=err) from err - except Exception as err: - raise ConfigNamesError("Cannot get the configuration names for the dataset.", cause=err) from err - - # prepare the parquet files locally - parquet_files: List[ParquetFile] = [] - for config in config_names: - builder = load_dataset_builder(path=dataset, name=config, revision=source_revision, use_auth_token=hf_token) - builder.download_and_prepare(file_format="parquet") # the parquet files are stored in the cache dir - parquet_files.extend( - ParquetFile(local_file=local_file, local_dir=builder.cache_dir, config=config) - for local_file in glob.glob(f"{builder.cache_dir}**/*.parquet") + response = get_response(kind="/parquet-and-dataset-info", dataset=dataset) + except DoesNotExist as e: + raise DatasetNotFoundError("No response found in
previous step for this dataset.", e) from e + if response["http_status"] != HTTPStatus.OK: + raise PreviousStepStatusError( + f"Previous step gave an error: {response['http_status']}. This job should not have been created." @@ -511,45 +83,3 @@ def compute_parquet_response( - - # create the target revision if it does not exist yet - try: - target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) - except RepositoryNotFoundError as err: - raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err - except RevisionNotFoundError: - # create the parquet_ref (refs/convert/parquet) - committer_hf_api.create_branch(repo_id=dataset, branch=target_revision, repo_type=DATASET_TYPE) - target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) - - # delete: - # - the previous files, - previous_files = {f.rfilename for f in target_dataset_info.siblings} - # except: - # - the files we will update, - files_to_add = {parquet_file.repo_file(): parquet_file.local_file for parquet_file in parquet_files} - # - .gitattributes if present. - files_to_delete = previous_files - set(files_to_add.keys()).union({".gitattributes"}) - delete_operations: List[CommitOperation] = [CommitOperationDelete(path_in_repo=file) for file in files_to_delete] - logging.debug(f"delete_operations={delete_operations}") - - # send the files to the target revision - add_operations: List[CommitOperation] = [ - CommitOperationAdd(path_in_repo=file, path_or_fileobj=local_file) - for (file, local_file) in files_to_add.items() - ] - logging.debug(f"add_operations={add_operations}") - - committer_hf_api.create_commit( - repo_id=dataset, - repo_type=DATASET_TYPE, - revision=target_revision, - operations=delete_operations + add_operations, - commit_message=commit_message, - parent_commit=target_dataset_info.sha, - ) - - # call the API again to get the list of parquet files - target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=True) - repo_files = [repo_file for repo_file in target_dataset_info.siblings if repo_file.rfilename.endswith(".parquet")] - # we might want to check if the sha of the parquet files is the same as the one we just uploaded - # we could also check that the list of parquet files is exactly what we expect - # let's not over engineer this for now. 
After all, what is on the Hub is the source of truth - # and the /parquet response is more a helper to get the list of parquet files + content = response["content"] + if "parquet_files" not in content: + raise PreviousStepFormatError("Previous step did not return the expected content.") @@ -557,10 +87 @@ def compute_parquet_response( - "parquet_files": [ - create_parquet_file_item( - repo_file=repo_file, - dataset=dataset, - hf_endpoint=hf_endpoint, - target_revision=target_revision, - url_template=url_template, - ) - for repo_file in repo_files - ], + "parquet_files": content["parquet_files"], @@ -570,3 +91 @@ def compute_parquet_response( -class ParquetWorker(DatasetsBasedWorker): - parquet_config: ParquetConfig - +class ParquetWorker(Worker): @@ -581,3 +100,10 @@ class ParquetWorker(DatasetsBasedWorker): - def __init__(self, job_info: JobInfo, app_config: AppConfig, parquet_config: ParquetConfig) -> None: - super().__init__(job_info=job_info, app_config=app_config) - self.parquet_config = parquet_config + def __init__(self, job_info: JobInfo, app_config: AppConfig) -> None: + job_type = job_info["type"] + try: + processing_step = app_config.processing_graph.graph.get_step_by_job_type(job_type) + except ValueError as e: + raise ValueError( + f"Unsupported job type: '{job_type}'. The job types declared in the processing graph are:" + f" {[step.job_type for step in app_config.processing_graph.graph.steps.values()]}" + ) from e + super().__init__(job_info=job_info, common_config=app_config.common, processing_step=processing_step) @@ -586,13 +112 @@ class ParquetWorker(DatasetsBasedWorker): - return compute_parquet_response( - dataset=self.dataset, - hf_endpoint=self.common_config.hf_endpoint, - hf_token=self.common_config.hf_token, - committer_hf_token=self.parquet_config.committer_hf_token, - source_revision=self.parquet_config.source_revision, - target_revision=self.parquet_config.target_revision, - commit_message=self.parquet_config.commit_message, - url_template=self.parquet_config.url_template, - supported_datasets=self.parquet_config.supported_datasets, - blocked_datasets=self.parquet_config.blocked_datasets, - max_dataset_size=self.parquet_config.max_dataset_size, - ) + return compute_parquet_response(dataset=self.dataset) diff --git a/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py new file mode 100644 index 00000000..7205ec2f --- /dev/null +++ b/workers/datasets_based/src/datasets_based/workers/parquet_and_dataset_info.py @@ -0,0 +1,685 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
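Both slimmed-down workers above (ParquetWorker, and DatasetInfoWorker in the new dataset_info.py) now follow the same pattern: read the cached response of the /parquet-and-dataset-info step, check its HTTP status, and extract one key from its content. Below is a minimal self-contained sketch of that pattern, not the repository's exact code: the in-memory CACHE dict, the read_previous_step helper, and the "user/dataset" name are stand-ins for illustration (the real code uses libcommon.simple_cache.get_response and raises the module-specific error classes).

from http import HTTPStatus
from typing import Any, Mapping

# Stand-in for the MongoDB-backed cache in the real code: (kind, dataset) -> response.
CACHE: dict[tuple[str, str], Mapping[str, Any]] = {}

def get_response(kind: str, dataset: str) -> Mapping[str, Any]:
    # Stand-in for libcommon.simple_cache.get_response; raises KeyError if absent.
    return CACHE[(kind, dataset)]

def read_previous_step(dataset: str, required_key: str) -> Any:
    # 1. look up the cached response of the previous step
    try:
        response = get_response(kind="/parquet-and-dataset-info", dataset=dataset)
    except KeyError as e:
        raise LookupError("No response found in previous step for this dataset.") from e
    # 2. the previous step must have succeeded
    if response["http_status"] != HTTPStatus.OK:
        raise RuntimeError(f"Previous step gave an error: {response['http_status']}.")
    # 3. the content must have the expected shape
    content = response["content"]
    if required_key not in content:
        raise ValueError("Previous step did not return the expected content.")
    return content[required_key]

CACHE[("/parquet-and-dataset-info", "user/dataset")] = {
    "http_status": HTTPStatus.OK,
    "content": {"parquet_files": [], "dataset_info": {}},
}
print(read_previous_step("user/dataset", "parquet_files"))  # -> []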
+ +import contextlib +import glob +import logging +import re +from http import HTTPStatus +from pathlib import Path +from typing import Any, List, Literal, Mapping, Optional, Tuple, TypedDict +from urllib.parse import quote + +import datasets +import datasets.config +from datasets import get_dataset_config_names, get_dataset_infos, load_dataset_builder +from datasets.data_files import EmptyDatasetError as _EmptyDatasetError +from datasets.utils.py_utils import asdict +from huggingface_hub.hf_api import ( + CommitOperation, + CommitOperationAdd, + CommitOperationDelete, + DatasetInfo, + HfApi, + RepoFile, +) +from huggingface_hub.utils import RepositoryNotFoundError, RevisionNotFoundError +from libcommon.dataset import ask_access +from libcommon.exceptions import CustomError +from libcommon.simple_cache import SplitFullName + +from datasets_based.config import AppConfig, ParquetAndDatasetInfoConfig +from datasets_based.worker import DatasetNotFoundError, JobInfo +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker + +ParquetAndDatasetInfoWorkerErrorCode = Literal[ + "DatasetRevisionNotFoundError", + "EmptyDatasetError", + "ConfigNamesError", + "DatasetInBlockListError", + "DatasetTooBigFromHubError", + "DatasetTooBigFromDatasetsError", +] + + +class ParquetAndDatasetInfoWorkerError(CustomError): + """Base class for exceptions in this module.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: ParquetAndDatasetInfoWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__(message, status_code, str(code), cause, disclose_cause) + + +class DatasetRevisionNotFoundError(ParquetAndDatasetInfoWorkerError): + """Raised when the revision of a dataset repository does not exist.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_FOUND, "DatasetRevisionNotFoundError", cause, False) + + +class ConfigNamesError(ParquetAndDatasetInfoWorkerError): + """Raised when the configuration names could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "ConfigNamesError", cause, True) + + +class EmptyDatasetError(ParquetAndDatasetInfoWorkerError): + """Raised when the dataset has no data.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) + + +class DatasetInBlockListError(ParquetAndDatasetInfoWorkerError): + """Raised when the dataset is in the list of blocked datasets.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetInBlockListError", cause, False) + + +class DatasetTooBigFromHubError(ParquetAndDatasetInfoWorkerError): + """Raised when the dataset size (sum of files on the Hub) is too big.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetTooBigFromHubError", cause, False) + + +class DatasetTooBigFromDatasetsError(ParquetAndDatasetInfoWorkerError): + """Raised when the dataset size (sum of config sizes given by the datasets library) is too big.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetTooBigFromDatasetsError", cause, 
False) + + +class ParquetFileItem(TypedDict): + dataset: str + config: str + split: str + url: str + filename: str + size: int + + +class ParquetAndDatasetInfoResponse(TypedDict): + parquet_files: List[ParquetFileItem] + dataset_info: dict[str, Any] + + +DATASET_TYPE = "dataset" + + +class ParquetFile: + def __init__(self, local_file: str, local_dir: str, config: str): + if not local_file.startswith(local_dir): + raise ValueError(f"{local_file} is not in {local_dir}") + self.local_file = local_file + self.local_dir = local_dir + self.config = config + + def repo_file(self) -> str: + return f'{self.config}/{self.local_file.removeprefix(f"{self.local_dir}/")}' + + +# TODO: use huggingface_hub's hf_hub_url after +# https://github.com/huggingface/huggingface_hub/issues/1082 +def hf_hub_url(repo_id: str, filename: str, hf_endpoint: str, revision: str, url_template: str) -> str: + return (hf_endpoint + url_template) % (repo_id, quote(revision, safe=""), filename) + + +p = re.compile(r"[\w]+-(?P<split>[\w]+?)(-[0-9]{5}-of-[0-9]{5})?.parquet") + + +def parse_repo_filename(filename: str) -> Tuple[str, str]: + parts = filename.split("/") + if len(parts) != 2: + raise ValueError(f"Invalid filename: {filename}") + config, fname = parts + m = p.match(fname) + if not m: + raise ValueError(f"Cannot parse {filename}") + split = m.group("split") + return config, split + + +def create_parquet_file_item( + repo_file: RepoFile, + dataset: str, + hf_endpoint: str, + target_revision: str, + url_template: str, +) -> ParquetFileItem: + if repo_file.size is None: + raise ValueError(f"Cannot get size of {repo_file.rfilename}") + config, split = parse_repo_filename(repo_file.rfilename) + return { + "dataset": dataset, + "config": config, + "split": split, + "url": hf_hub_url( + repo_id=dataset, + filename=repo_file.rfilename, + hf_endpoint=hf_endpoint, + revision=target_revision, + url_template=url_template, + ), + "filename": Path(repo_file.rfilename).name, + "size": repo_file.size, + } + + +def raise_if_blocked( + dataset: str, + blocked_datasets: List[str], +) -> None: + """ + Raise an error if the dataset is in the list of blocked datasets + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + blocked_datasets (`List[str]`): + The list of blocked datasets. If empty, no dataset is blocked. + Returns: + `None` + <Tip> + Raises the following errors: + - [`~parquet.worker.DatasetInBlockListError`] + If the dataset is in the list of blocked datasets. + </Tip> + """ + if dataset in blocked_datasets: + raise DatasetInBlockListError( + "The parquet conversion has been disabled for this dataset for now. Please open an issue in" + " https://github.com/huggingface/datasets-server if you want this dataset to be supported." + ) + + +def get_dataset_info_or_raise( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + revision: str, +) -> DatasetInfo: + """ + Return the dataset info if possible. + Raise an error if the dataset cannot be accessed (does not exist, gated with extra fields, private) + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + revision (`str`): + The git revision (e.g. 
"main" or sha) of the dataset + Returns: + `DatasetInfo`: The dataset info + <Tip> + Raises the following errors: + - [`~libcommon.worker.DatasetNotFoundError`] + If the repository to download from cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. + - [`~parquet.worker.DatasetRevisionNotFoundError`] + If the revision does not exist or cannot be accessed using the token. + </Tip> + """ + try: + dataset_info = HfApi(endpoint=hf_endpoint, token=hf_token).dataset_info( + repo_id=dataset, revision=revision, files_metadata=True + ) + except RepositoryNotFoundError as err: + raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + except RevisionNotFoundError as err: + raise DatasetRevisionNotFoundError("The dataset revision does not exist on the Hub.") from err + return dataset_info + + +def raise_if_too_big_from_hub( + dataset_info: DatasetInfo, + max_dataset_size: int, +) -> None: + """ + Raise an error if the dataset is too big to be converted to parquet + + Args: + dataset_info (`DatasetInfo`): + The dataset info + max_dataset_size (`int`): + The maximum size of the dataset in bytes + Returns: + `None` + <Tip> + Raises the following errors: + - [`~parquet.worker.DatasetTooBigFromHubError`] + If the dataset is too big to be converted to parquet + </Tip> + """ + dataset_size: int = sum(sibling.size for sibling in dataset_info.siblings if sibling.size is not None) + if dataset_size > max_dataset_size: + raise DatasetTooBigFromHubError( + f"The conversion to parquet is limited to datasets under {max_dataset_size} bytes. " + f"Current size of files on the hub is {dataset_size} bytes." + ) + + +def raise_if_too_big_from_datasets( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + revision: str, + max_dataset_size: int, +) -> None: + """ + Raise an error if the dataset is too big to be converted to parquet, as measured by the sum of the configs + sizes given by the datasets library + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + revision (`str`): + The git revision (e.g. "main" or sha) of the dataset + max_dataset_size (`int`): + The maximum size of the dataset in bytes + Returns: + `None` + <Tip> + Raises the following errors: + - [`ValueError`] + If the datasets.config.HF_ENDPOINT is not set to the expected value + - [`~parquet.worker.DatasetTooBigFromDatasetsError`] + If the dataset is too big to be converted to parquet + </Tip> + """ + if datasets.config.HF_ENDPOINT != hf_endpoint: + raise ValueError( + "datasets.config.HF_ENDPOINT should have already been set to {hf_endpoint}. " + f"Current value: {datasets.config.HF_ENDPOINT}. " + ) + dataset_size = 0 + with contextlib.suppress(Exception): + infos = get_dataset_infos(path=dataset, revision=revision, use_auth_token=hf_token) + dataset_size = sum(value.dataset_size for value in infos.values() if value.dataset_size is not None) + if dataset_size > max_dataset_size: + raise DatasetTooBigFromDatasetsError( + f"The conversion to parquet is limited to datasets under {max_dataset_size} bytes. " + f"Current size as given per the datasets library is {dataset_size} bytes." 
+ ) + + +def raise_if_not_supported( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + committer_hf_token: Optional[str], + revision: str, + supported_datasets: List[str], + blocked_datasets: List[str], + max_dataset_size: int, +) -> None: + """ + Raise an error if the dataset is not supported: + - if the dataset is in the list of blocked datasets + - if the dataset cannot be accessed (does not exist, gated with extra fields, private) + - if the dataset is too big, and not in the list of supported datasets + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + committer_hf_token (`str`, `optional`): + A user authentication token (See https://huggingface.co/settings/token) with write access. It must: + - be part of the `huggingface` organization (to create the refs/convert/parquet "branch") + - be part of the `datasets-maintainers` organization (to push to the refs/convert/parquet "branch") + revision (`str`): + The git revision (e.g. "main" or sha) of the dataset + supported_datasets (`List[str]`): + The list of supported datasets, excluding the blocked datasets. If empty, all datasets are supported + (excluding the blocked datasets). + blocked_datasets (`List[str]`): + The list of blocked datasets. If empty, no dataset is blocked. + max_dataset_size (`int`): + The maximum size of a dataset in bytes. If the dataset is under the limit (which means that the size + can be fetched), it will be allowed. + Returns: + `None` + <Tip> + Raises the following errors: + - [`~parquet.worker.DatasetInBlockListError`] + If the dataset is in the list of blocked datasets. + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. + - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give the sufficient access to the dataset, or if the dataset is private + (private datasets are not supported by the datasets server) + - ['~requests.exceptions.HTTPError']: any other error when asking access + - [`~parquet.worker.DatasetRevisionNotFoundError`] + If the revision does not exist or cannot be accessed using the token.
+ - [`~parquet.worker.DatasetTooBigFromHubError`] + If the dataset is too big to be converted to parquet + - [`ValueError`] + If the datasets.config.HF_ENDPOINT is not set to the expected value + - [`~parquet.worker.DatasetTooBigFromDatasetsError`] + If the dataset is too big to be converted to parquet + </Tip> + """ + raise_if_blocked(dataset=dataset, blocked_datasets=blocked_datasets) + ask_access(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=committer_hf_token) + dataset_info = get_dataset_info_or_raise( + dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, revision=revision + ) + if dataset in supported_datasets: + return + raise_if_too_big_from_datasets( + dataset=dataset, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + revision=revision, + max_dataset_size=max_dataset_size, + ) + raise_if_too_big_from_hub(dataset_info=dataset_info, max_dataset_size=max_dataset_size) + + +class EmptySplitsError(Exception): + pass + + +class SplitInfoFormatError(Exception): + pass + + +class EmptyConfigNameError(Exception): + pass + + +class EmptyDownloadSizeError(Exception): + pass + + +class EmptyFeaturesError(Exception): + pass + + +# def dataset_info_to_splits_response(dataset: str, config_infos: List[DatasetInfo]): +# split_items: List[SplitItem] = [] +# for config_info in config_infos: +# config = config_info.config_name +# if config is None: +# raise EmptyConfigNameError(f"Dataset info for dataset='{dataset}' has no config name.") +# if config_info.splits is None: +# raise EmptySplitsError(f"Dataset info for dataset='{dataset}', config='{config}' has no splits.") +# if config_info.download_size is None: +# raise EmptyDownloadSizeError( +# f"Dataset info for dataset='{dataset}', config='{config}' has no download_size." +# ) +# if config_info.features is None: +# raise EmptyFeaturesError(f"Dataset info for dataset='{dataset}', config='{config}' has no features.") +# for split_info in config_info.splits.values(): +# if not isinstance(split_info, SplitInfo): +# raise SplitInfoFormatError( +# f"Split info for dataset='{dataset}', config='{config}' has an unknown format." +# ) +# split = split_info.name +# split_items.append( +# # {'train': SplitInfo(name='train', num_bytes=148581, num_examples=569, shard_lengths=None, +# #dataset_name='csv')} +# { +# "dataset": dataset, +# "config": config, +# "split": split, +# "stats": { +# "config_download_size": config_info.download_size, +# "parquet_size": split_info.num_bytes, +# "num_examples": split_info.num_examples, +# "num_columns": len(config_info.features), +# TODO: shard? +# }, +# "links": { +# ... +# } +# } +# ) + +# # # original_size +# # # parquet_size +# # # num_rows +# # # num_columns +# # # links to: columns (features), first-rows, parquet files +# # config_info: Dict[str, DatasetInfo] = {} + + +def compute_parquet_and_dataset_info_response( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + committer_hf_token: Optional[str], + source_revision: str, + target_revision: str, + commit_message: str, + url_template: str, + supported_datasets: List[str], + blocked_datasets: List[str], + max_dataset_size: int, +) -> ParquetAndDatasetInfoResponse: + """ + Get the response of /parquet-and-dataset-info for one specific dataset on huggingface.co. + It is assumed that the dataset can be accessed with the token. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. 
+ hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + committer_hf_token (`str`, `optional`): + A user authentication token (See https://huggingface.co/settings/token) with write access. It must: + - be part of the `huggingface` organization (to create the refs/convert/parquet "branch") + - be part of the `datasets-maintainers` organization (to push to the refs/convert/parquet "branch") + source_revision (`str`): + The git revision (e.g. "main" or sha) of the dataset used to prepare the parquet files + target_revision (`str`): + The target git revision (e.g. "refs/convert/parquet") of the dataset where to store the parquet files + commit_message (`str`): + The commit message to use when storing the parquet files + url_template (`str`): + The template to use to build the parquet file url + supported_datasets (`List[str]`): + The list of supported datasets, excluding the blocked datasets. If empty, all datasets are supported + (excluding the blocked datasets). + blocked_datasets (`List[str]`): + The list of blocked datasets. If empty, no dataset is blocked. + max_dataset_size (`int`): + The maximum size of a dataset in bytes. If the dataset is under the limit (which means that the size + can be fetched), it will be allowed. + Returns: + `ParquetAndDatasetInfoResponse`: An object with the parquet_and_dataset_info_response + (dataset info and list of parquet files). + <Tip> + Raises the following errors: + - [`~parquet_and_dataset_info.worker.DatasetInBlockListError`] + If the dataset is in the list of blocked datasets. + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. + - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give the sufficient access to the dataset, or if the dataset is private + (private datasets are not supported by the datasets server) + - ['~requests.exceptions.HTTPError']: any other error when asking access + - [`~parquet_and_dataset_info.worker.DatasetRevisionNotFoundError`] + If the revision does not exist or cannot be accessed using the token. + - [`~parquet_and_dataset_info.worker.DatasetTooBigFromHubError`] + If the dataset is too big to be converted to parquet + - [`ValueError`] + If the datasets.config.HF_ENDPOINT is not set to the expected value + - [`~parquet_and_dataset_info.worker.DatasetTooBigFromDatasetsError`] + If the dataset is too big to be converted to parquet + - [`~parquet_and_dataset_info.worker.EmptyDatasetError`] + The dataset is empty. + - [`~parquet_and_dataset_info.worker.ConfigNamesError`] + If the list of configurations could not be obtained using the datasets library.
+ </Tip> + """ + logging.info(f"get parquet files and dataset info for dataset={dataset}") + + raise_if_not_supported( + dataset=dataset, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + committer_hf_token=committer_hf_token, + revision=source_revision, + supported_datasets=supported_datasets, + blocked_datasets=blocked_datasets, + max_dataset_size=max_dataset_size, + ) + + hf_api = HfApi(endpoint=hf_endpoint, token=hf_token) + committer_hf_api = HfApi(endpoint=hf_endpoint, token=committer_hf_token) + + # get the sorted list of configurations + try: + config_names = sorted( + str(config) + for config in get_dataset_config_names(path=dataset, revision=source_revision, use_auth_token=hf_token) + ) + except _EmptyDatasetError as err: + raise EmptyDatasetError("The dataset is empty.", cause=err) from err + except Exception as err: + raise ConfigNamesError("Cannot get the configuration names for the dataset.", cause=err) from err + + # prepare the parquet files locally + parquet_files: List[ParquetFile] = [] + dataset_info: dict[str, Any] = {} + for config in config_names: + # TODO: run the loop in parallel, in different workers? with dagster? + builder = load_dataset_builder(path=dataset, name=config, revision=source_revision, use_auth_token=hf_token) + builder.download_and_prepare(file_format="parquet") # the parquet files are stored in the cache dir + dataset_info[config] = asdict(builder.info) + # ^ see + # https://github.dev/huggingface/datasets/blob/e183a269067575db8765ee979bd8523d14a1adae/src/datasets/info.py#L244-L245 + parquet_files.extend( + ParquetFile(local_file=local_file, local_dir=builder.cache_dir, config=config) + for local_file in glob.glob(f"{builder.cache_dir}**/*.parquet") + ) + + # create the target revision if it does not exist yet + try: + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) + except RepositoryNotFoundError as err: + raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + except RevisionNotFoundError: + # create the parquet_ref (refs/convert/parquet) + committer_hf_api.create_branch(repo_id=dataset, branch=target_revision, repo_type=DATASET_TYPE) + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) + + # delete: + # - the previous files, + previous_files = {f.rfilename for f in target_dataset_info.siblings} + # except: + # - the files we will update, + files_to_add = {parquet_file.repo_file(): parquet_file.local_file for parquet_file in parquet_files} + # - .gitattributes if present. 
+ files_to_delete = previous_files - set(files_to_add.keys()).union({".gitattributes"}) + delete_operations: List[CommitOperation] = [CommitOperationDelete(path_in_repo=file) for file in files_to_delete] + logging.debug(f"delete_operations={delete_operations}") + + # send the files to the target revision + add_operations: List[CommitOperation] = [ + CommitOperationAdd(path_in_repo=file, path_or_fileobj=local_file) + for (file, local_file) in files_to_add.items() + ] + logging.debug(f"add_operations={add_operations}") + + committer_hf_api.create_commit( + repo_id=dataset, + repo_type=DATASET_TYPE, + revision=target_revision, + operations=delete_operations + add_operations, + commit_message=commit_message, + parent_commit=target_dataset_info.sha, + ) + + # call the API again to get the list of parquet files + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=True) + repo_files = [repo_file for repo_file in target_dataset_info.siblings if repo_file.rfilename.endswith(".parquet")] + # we might want to check if the sha of the parquet files is the same as the one we just uploaded + # we could also check that the list of parquet files is exactly what we expect + # let's not over engineer this for now. After all, what is on the Hub is the source of truth + # and the /parquet response is more a helper to get the list of parquet files + return { + "parquet_files": [ + create_parquet_file_item( + repo_file=repo_file, + dataset=dataset, + hf_endpoint=hf_endpoint, + target_revision=target_revision, + url_template=url_template, + ) + for repo_file in repo_files + ], + "dataset_info": dataset_info, + } + + +class ParquetAndDatasetInfoWorker(DatasetsBasedWorker): + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig + + @staticmethod + def get_job_type() -> str: + return "/parquet-and-dataset-info" + + @staticmethod + def get_version() -> str: + return "2.0.0" + + def __init__( + self, job_info: JobInfo, app_config: AppConfig, parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig + ) -> None: + super().__init__(job_info=job_info, app_config=app_config) + self.parquet_and_dataset_info_config = parquet_and_dataset_info_config + + def compute(self) -> Mapping[str, Any]: + return compute_parquet_and_dataset_info_response( + dataset=self.dataset, + hf_endpoint=self.common_config.hf_endpoint, + hf_token=self.common_config.hf_token, + committer_hf_token=self.parquet_and_dataset_info_config.committer_hf_token, + source_revision=self.parquet_and_dataset_info_config.source_revision, + target_revision=self.parquet_and_dataset_info_config.target_revision, + commit_message=self.parquet_and_dataset_info_config.commit_message, + url_template=self.parquet_and_dataset_info_config.url_template, + supported_datasets=self.parquet_and_dataset_info_config.supported_datasets, + blocked_datasets=self.parquet_and_dataset_info_config.blocked_datasets, + max_dataset_size=self.parquet_and_dataset_info_config.max_dataset_size, + ) + + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + """Get the set of new splits, from the content created by the compute.""" + return { + SplitFullName(dataset=parquet_file["dataset"], config=parquet_file["config"], split=parquet_file["split"]) + for parquet_file in content["parquet_files"] + } diff --git a/workers/datasets_based/src/datasets_based/workers/splits.py b/workers/datasets_based/src/datasets_based/workers/splits.py index 81fa18cd..7c867788 100644 --- 
a/workers/datasets_based/src/datasets_based/workers/splits.py +++ b/workers/datasets_based/src/datasets_based/workers/splits.py @@ -6,8 +6,3 @@ from http import HTTPStatus -from typing import Any, Dict, List, Literal, Mapping, Optional, TypedDict, Union - -from datasets import ( - DatasetInfo, - get_dataset_config_info, - get_dataset_config_names, - get_dataset_split_names, -) +from typing import Any, List, Literal, Mapping, Optional, TypedDict, Union + +from datasets import get_dataset_config_names, get_dataset_split_names @@ -16 +11 @@ from libcommon.exceptions import CustomError -from libcommon.simple_cache import SplitFullName as _SplitFullName +from libcommon.simple_cache import SplitFullName @@ -56 +51 @@ class EmptyDatasetError(SplitWorkerError): -class SplitFullName(TypedDict): +class SplitItem(TypedDict): @@ -62,5 +56,0 @@ class SplitFullName(TypedDict): -class SplitItem(SplitFullName): - num_bytes: Optional[int] - num_examples: Optional[int] - - @@ -71 +61 @@ class SplitsResponseContent(TypedDict): -def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, None] = False) -> List[SplitFullName]: +def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, None] = False) -> List[SplitItem]: @@ -81 +71 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, - List[SplitFullName]: a list of splits full names: objects with the keys `dataset`, `config` and `split`. They + List[SplitItem]: a list of splits full names: objects with the keys `dataset`, `config` and `split`. They @@ -102,0 +93,5 @@ def compute_splits_response( + The list of splits might require the dataset to support the streaming mode. See + https://github.dev/huggingface/datasets/blob/e183a269067575db8765ee979bd8523d14a1adae/src/datasets/inspect.py#L389-L390 + + The /splits response generated by this function does not include the optional "stats" field. See ./parquet.py + @@ -112,2 +107 @@ def compute_splits_response( - `SplitsResponseResult`: An object with the splits_response - (list of splits names) and the dataset_git_revision (sha) if any. + `SplitsResponseContent`: An object with the list of split names. 
@@ -124 +118 @@ def compute_splits_response( - # get the list of splits + # get the list of splits in streaming mode @@ -126 +120 @@ def compute_splits_response( - split_full_names = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) + split_items = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) @@ -131,29 +125 @@ def compute_splits_response( - # get the number of bytes and examples for each split - config_info: Dict[str, DatasetInfo] = {} - split_items: List[SplitItem] = [] - for split_full_name in split_full_names: - dataset = split_full_name["dataset"] - config = split_full_name["config"] - split = split_full_name["split"] - try: - if config not in config_info: - config_info[config] = get_dataset_config_info( - path=dataset, - config_name=config, - use_auth_token=use_auth_token, - ) - info = config_info[config] - num_bytes = info.splits[split].num_bytes if info.splits else None - num_examples = info.splits[split].num_examples if info.splits else None - except Exception: - num_bytes = None - num_examples = None - split_items.append( - { - "dataset": dataset, - "config": config, - "split": split, - "num_bytes": num_bytes, - "num_examples": num_examples, - } - ) + # As a rule, null values should have their fields removed -> "stats" field is not included @@ -175 +141 @@ class SplitsWorker(DatasetsBasedWorker): - def get_new_splits(self, content: Mapping[str, Any]) -> set[_SplitFullName]: + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: @@ -177 +143 @@ class SplitsWorker(DatasetsBasedWorker): - return {_SplitFullName(dataset=s["dataset"], config=s["config"], split=s["split"]) for s in content["splits"]} + return {SplitFullName(dataset=s["dataset"], config=s["config"], split=s["split"]) for s in content["splits"]} diff --git a/workers/datasets_based/tests/conftest.py b/workers/datasets_based/tests/conftest.py index 78bcc47e..476ecb04 100644 --- a/workers/datasets_based/tests/conftest.py +++ b/workers/datasets_based/tests/conftest.py @@ -6,0 +7,2 @@ from typing import Iterator +from libcommon.config import CacheConfig, QueueConfig +from libcommon.processing_graph import ProcessingStep @@ -48,2 +50,2 @@ def set_env_vars(datasets_cache_directory: Path, modules_cache_directory: Path) - mp.setenv("PARQUET_MAX_DATASET_SIZE", "10_000") - mp.setenv("PARQUET_COMMITTER_HF_TOKEN", CI_USER_TOKEN) + mp.setenv("PARQUET_AND_DATASET_INFO_MAX_DATASET_SIZE", "10_000") + mp.setenv("PARQUET_AND_DATASET_INFO_COMMITTER_HF_TOKEN", CI_USER_TOKEN) @@ -75,0 +78,29 @@ pytest_plugins = ["tests.fixtures.datasets", "tests.fixtures.files", "tests.fixt + + +@fixture() +def test_processing_step() -> ProcessingStep: + return ProcessingStep( + endpoint="/dummy", + input_type="dataset", + requires=None, + required_by_dataset_viewer=False, + parent=None, + ancestors=[], + children=[], + ) + + +@fixture() +def cache_config(app_config: AppConfig) -> CacheConfig: + cache_config = app_config.cache + if "test" not in cache_config.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return cache_config + + +@fixture() +def queue_config(app_config: AppConfig) -> QueueConfig: + queue_config = app_config.queue + if "test" not in queue_config.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return queue_config diff --git a/workers/datasets_based/tests/fixtures/datasets.py b/workers/datasets_based/tests/fixtures/datasets.py index 62ce66ef..af99eb29 100644 --- 
a/workers/datasets_based/tests/fixtures/datasets.py +++ b/workers/datasets_based/tests/fixtures/datasets.py @@ -64 +64 @@ def datasets() -> Mapping[str, Dataset]: - "string": value("a string", pd.StringDtype()), + "string": value("a string", pd.StringDtype(storage="python")), @@ -130 +130,3 @@ def datasets() -> Mapping[str, Dataset]: - "big": Dataset.from_pandas(pd.DataFrame({"col": ["a" * 1_234 for _ in range(4_567)]}, dtype=pd.StringDtype())), + "big": Dataset.from_pandas( + pd.DataFrame({"col": ["a" * 1_234 for _ in range(4_567)]}, dtype=pd.StringDtype(storage="python")) + ), diff --git a/workers/datasets_based/tests/fixtures/hub.py b/workers/datasets_based/tests/fixtures/hub.py index 14e598c5..79037ebb 100644 --- a/workers/datasets_based/tests/fixtures/hub.py +++ b/workers/datasets_based/tests/fixtures/hub.py @@ -9 +9 @@ from pathlib import Path -from typing import Any, Iterable, List, Mapping, Optional, Tuple, TypedDict +from typing import Any, Iterable, List, Literal, Mapping, Optional, Tuple, TypedDict @@ -129,0 +130 @@ def delete_hub_dataset_repo(repo_id: str) -> None: + @@ -207 +208 @@ class HubDatasetTest(TypedDict): - parquet_response: Any + parquet_and_dataset_info_response: Any @@ -213 +214 @@ HubDatasets = Mapping[str, HubDatasetTest] -def create_splits_response(dataset: str, num_bytes: float = None, num_examples: int = None): +def create_splits_response(dataset: str): @@ -221,2 +221,0 @@ def create_splits_response(dataset: str, num_bytes: float = None, num_examples: - "num_bytes": num_bytes, - "num_examples": num_examples, @@ -253 +252,44 @@ def create_first_rows_response(dataset: str, cols: Mapping[str, Any], rows: List -def create_parquet_response(dataset: str, filename: str, size: int): +def create_dataset_info_response_for_csv(dataset: str, config: str): + return { + "description": "", + "citation": "", + "homepage": "", + "license": "", + "features": DATA_cols, + "builder_name": "csv", + "config_name": config, + "version": {"version_str": "0.0.0", "major": 0, "minor": 0, "patch": 0}, + "splits": {"train": {"name": "train", "num_bytes": 96, "num_examples": 4, "dataset_name": "csv"}}, + "download_checksums": { + f"https://hub-ci.huggingface.co/datasets/{dataset}/resolve/__COMMIT__/dataset.csv": { + "num_bytes": 50, + "checksum": "441b6927a5442803821415bdcb0f418731b0d2a525a7f2e68ce0df0e95d444de", + } + }, + "download_size": 50, + "dataset_size": 96, + "size_in_bytes": 146, + } + + +def create_dataset_info_response_for_audio(dataset: str, config: str): + return { + "description": "", + "citation": "", + "homepage": "", + "license": "", + "features": AUDIO_cols, + "splits": {"train": {"name": "train", "num_bytes": 59, "num_examples": 1, "dataset_name": "parquet"}}, + "download_checksums": { + "SOME_KEY": { + "num_bytes": 1124, + "checksum": "3b630ef6ede66c5ced336df78fd99d98f835b459baadbe88a2cdf180709e9543", + } + }, + "download_size": 1124, + "dataset_size": 59, + "size_in_bytes": 1183, + } + + +def create_parquet_and_dataset_info_response(dataset: str, data_type: Literal["csv", "audio"]): @@ -254,0 +297,8 @@ def create_parquet_response(dataset: str, filename: str, size: int): + + filename = "csv-train.parquet" if data_type == "csv" else "parquet-train.parquet" + size = CSV_PARQUET_SIZE if data_type == "csv" else AUDIO_PARQUET_SIZE + info = ( + create_dataset_info_response_for_csv(dataset, config) + if data_type == "csv" + else create_dataset_info_response_for_audio(dataset, config) + ) @@ -267,0 +318 @@ def create_parquet_response(dataset: str, filename: str, size: int): + 
"dataset_info": {config: info}, @@ -393 +444 @@ def hub_datasets( - "parquet_response": None, + "parquet_and_dataset_info_response": None, @@ -399 +450 @@ def hub_datasets( - "parquet_response": None, + "parquet_and_dataset_info_response": None, @@ -403 +454 @@ def hub_datasets( - "splits_response": create_splits_response(hub_public_csv, None, None), + "splits_response": create_splits_response(hub_public_csv), @@ -405,2 +456,2 @@ def hub_datasets( - "parquet_response": create_parquet_response( - dataset=hub_public_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + "parquet_and_dataset_info_response": create_parquet_and_dataset_info_response( + dataset=hub_public_csv, data_type="csv" @@ -411 +462 @@ def hub_datasets( - "splits_response": create_splits_response(hub_private_csv, None, None), + "splits_response": create_splits_response(hub_private_csv), @@ -413,2 +464,2 @@ def hub_datasets( - "parquet_response": create_parquet_response( - dataset=hub_private_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + "parquet_and_dataset_info_response": create_parquet_and_dataset_info_response( + dataset=hub_private_csv, data_type="csv" @@ -419 +470 @@ def hub_datasets( - "splits_response": create_splits_response(hub_gated_csv, None, None), + "splits_response": create_splits_response(hub_gated_csv), @@ -421,2 +472,2 @@ def hub_datasets( - "parquet_response": create_parquet_response( - dataset=hub_gated_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + "parquet_and_dataset_info_response": create_parquet_and_dataset_info_response( + dataset=hub_gated_csv, data_type="csv" @@ -427 +478 @@ def hub_datasets( - "splits_response": create_splits_response(hub_public_jsonl, None, None), + "splits_response": create_splits_response(hub_public_jsonl), @@ -429 +480 @@ def hub_datasets( - "parquet_response": None, + "parquet_and_dataset_info_response": None, @@ -433 +484 @@ def hub_datasets( - "splits_response": create_splits_response(hub_gated_extra_fields_csv, None, None), + "splits_response": create_splits_response(hub_gated_extra_fields_csv), @@ -435,2 +486,2 @@ def hub_datasets( - "parquet_response": create_parquet_response( - dataset=hub_gated_extra_fields_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + "parquet_and_dataset_info_response": create_parquet_and_dataset_info_response( + dataset=hub_gated_extra_fields_csv, data_type="csv" @@ -441 +492 @@ def hub_datasets( - "splits_response": create_splits_response(hub_public_audio, 54.0, 1), + "splits_response": create_splits_response(hub_public_audio), @@ -445,2 +496,2 @@ def hub_datasets( - "parquet_response": create_parquet_response( - dataset=hub_public_audio, filename="parquet-train.parquet", size=AUDIO_PARQUET_SIZE + "parquet_and_dataset_info_response": create_parquet_and_dataset_info_response( + dataset=hub_public_audio, data_type="audio" @@ -451 +502 @@ def hub_datasets( - "splits_response": create_splits_response(hub_public_image, 0, 1), + "splits_response": create_splits_response(hub_public_image), @@ -455 +506 @@ def hub_datasets( - "parquet_response": None, + "parquet_and_dataset_info_response": None, @@ -459 +510 @@ def hub_datasets( - "splits_response": create_splits_response(hub_public_images_list, 0, 1), + "splits_response": create_splits_response(hub_public_images_list), @@ -463 +514 @@ def hub_datasets( - "parquet_response": None, + "parquet_and_dataset_info_response": None, @@ -467 +518 @@ def hub_datasets( - "splits_response": create_splits_response(hub_public_big, 0, 1), + "splits_response": 
create_splits_response(hub_public_big), @@ -469 +520 @@ def hub_datasets( - "parquet_response": None, + "parquet_and_dataset_info_response": None, diff --git a/libs/libcommon/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py similarity index 90% rename from libs/libcommon/tests/test_worker.py rename to workers/datasets_based/tests/test_worker.py index 9fed5fdb..200b2543 100644 --- a/libs/libcommon/tests/test_worker.py +++ b/workers/datasets_based/tests/test_worker.py @@ -4 +3,0 @@ import pytest - @@ -9 +8,2 @@ from libcommon.simple_cache import SplitFullName, _clean_cache_database -from libcommon.worker import Worker, parse_version + +from datasets_based.worker import Worker @@ -38,17 +37,0 @@ class DummyWorker(Worker): [email protected]( - "string_version, expected_major_version, should_raise", - [ - ("1.0.0", 1, False), - ("3.1.2", 3, False), - ("1.1", 1, False), - ("not a version", None, True), - ], -) -def test_parse_version(string_version: str, expected_major_version: int, should_raise: bool) -> None: - if should_raise: - with pytest.raises(Exception): - parse_version(string_version) - else: - assert parse_version(string_version).major == expected_major_version - - diff --git a/workers/datasets_based/tests/test_worker_factory.py b/workers/datasets_based/tests/test_worker_factory.py index d662bd4d..52b95bc8 100644 --- a/workers/datasets_based/tests/test_worker_factory.py +++ b/workers/datasets_based/tests/test_worker_factory.py @@ -7 +6,0 @@ import pytest -from libcommon.worker import JobInfo @@ -9,0 +9 @@ from datasets_based.config import AppConfig +from datasets_based.worker import JobInfo @@ -17,0 +18 @@ from datasets_based.worker_factory import DatasetBasedWorkerFactory + ("/parquet-and-dataset-info", "ParquetAndDatasetInfoWorker"), @@ -18,0 +20 @@ from datasets_based.worker_factory import DatasetBasedWorkerFactory + ("/dataset-info", "DatasetInfoWorker"), diff --git a/libs/libcommon/tests/test_worker_loop.py b/workers/datasets_based/tests/test_worker_loop.py similarity index 94% rename from libs/libcommon/tests/test_worker_loop.py rename to workers/datasets_based/tests/test_worker_loop.py index df6e8535..8acbf23d 100644 --- a/libs/libcommon/tests/test_worker_loop.py +++ b/workers/datasets_based/tests/test_worker_loop.py @@ -4 +3,0 @@ import pytest - @@ -9,2 +8,3 @@ from libcommon.simple_cache import _clean_cache_database -from libcommon.worker import JobInfo, Worker, WorkerFactory -from libcommon.worker_loop import WorkerLoop + +from datasets_based.worker import JobInfo, Worker, WorkerFactory +from datasets_based.worker_loop import WorkerLoop diff --git a/workers/datasets_based/tests/workers/test_dataset_info.py b/workers/datasets_based/tests/workers/test_dataset_info.py new file mode 100644 index 00000000..62f98d6d --- /dev/null +++ b/workers/datasets_based/tests/workers/test_dataset_info.py @@ -0,0 +1,85 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from http import HTTPStatus +from typing import Any + +import pytest +from libcommon.simple_cache import _clean_cache_database, upsert_response + +from datasets_based.config import AppConfig +from datasets_based.workers.dataset_info import ( + DatasetInfoWorker, + DatasetNotFoundError, + PreviousStepFormatError, + PreviousStepStatusError, +) + + [email protected](autouse=True) +def clean_mongo_database(app_config: AppConfig) -> None: + _clean_cache_database() + + +def get_worker(dataset: str, app_config: AppConfig, force: bool = False) -> DatasetInfoWorker: + return DatasetInfoWorker( + job_info={ + "type": DatasetInfoWorker.get_job_type(), + "dataset": dataset, + "config": None, + "split": None, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + ) + + [email protected]( + "dataset,upstream_status,upstream_content,expected_error_code,expected_content,should_raise", + [ + ( + "ok", + HTTPStatus.OK, + {"parquet_files": [{"key": "value"}], "dataset_info": {"key": "value"}}, + None, + {"dataset_info": {"key": "value"}}, + False, + ), + ("status_error", HTTPStatus.NOT_FOUND, {"error": "error"}, PreviousStepStatusError.__name__, None, True), + ( + "format_error", + HTTPStatus.OK, + {"not_dataset_info": "wrong_format"}, + PreviousStepFormatError.__name__, + None, + True, + ), + ], +) +def test_compute( + app_config: AppConfig, + dataset: str, + upstream_status: HTTPStatus, + upstream_content: Any, + expected_error_code: str, + expected_content: Any, + should_raise: bool, +) -> None: + upsert_response( + kind="/parquet-and-dataset-info", dataset=dataset, content=upstream_content, http_status=upstream_status + ) + worker = get_worker(dataset=dataset, app_config=app_config) + if should_raise: + with pytest.raises(Exception) as e: + worker.compute() + assert e.type.__name__ == expected_error_code + else: + assert worker.compute() == expected_content + + +def test_doesnotexist(app_config: AppConfig) -> None: + dataset = "doesnotexist" + worker = get_worker(dataset=dataset, app_config=app_config) + with pytest.raises(DatasetNotFoundError): + worker.compute() diff --git a/workers/datasets_based/tests/workers/test_parquet.py b/workers/datasets_based/tests/workers/test_parquet.py index 23d420cb..7e644e94 100644 --- a/workers/datasets_based/tests/workers/test_parquet.py +++ b/workers/datasets_based/tests/workers/test_parquet.py @@ -4 +3,0 @@ -import io @@ -6 +5 @@ from http import HTTPStatus -from typing import Iterator, List +from typing import Any @@ -8 +6,0 @@ from typing import Iterator, List -import pandas as pd @@ -10,3 +8 @@ import pytest -import requests -from libcommon.exceptions import CustomError -from libcommon.simple_cache import DoesNotExist, get_response +from libcommon.simple_cache import _clean_cache_database, upsert_response @@ -14 +10 @@ from libcommon.simple_cache import DoesNotExist, get_response -from datasets_based.config import AppConfig, ParquetConfig +from datasets_based.config import AppConfig @@ -16,3 +12 @@ from datasets_based.workers.parquet import ( - DatasetInBlockListError, - DatasetTooBigFromDatasetsError, - DatasetTooBigFromHubError, + DatasetNotFoundError, @@ -20,6 +14,2 @@ from datasets_based.workers.parquet import ( - get_dataset_info_or_raise, - parse_repo_filename, - raise_if_blocked, - raise_if_not_supported, - raise_if_too_big_from_datasets, - raise_if_too_big_from_hub, + PreviousStepFormatError, + PreviousStepStatusError, @@ -28 +17,0 @@ from datasets_based.workers.parquet import ( -from ..fixtures.hub import HubDatasets @@ -30,20 +19,3 @@ 
from ..fixtures.hub import HubDatasets - -# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 [email protected](scope="module", autouse=True) -def set_supported_datasets(hub_datasets: HubDatasets) -> Iterator[pytest.MonkeyPatch]: - mp = pytest.MonkeyPatch() - mp.setenv( - "PARQUET_BLOCKED_DATASETS", - ",".join(value["name"] for value in hub_datasets.values() if "jsonl" in value["name"]), - ) - mp.setenv( - "PARQUET_SUPPORTED_DATASETS", - ",".join(value["name"] for value in hub_datasets.values() if "big" not in value["name"]), - ) - yield mp - mp.undo() - - [email protected] -def parquet_config(set_env_vars: pytest.MonkeyPatch, set_supported_datasets: pytest.MonkeyPatch) -> ParquetConfig: - return ParquetConfig.from_env() [email protected](autouse=True) +def clean_mongo_database(app_config: AppConfig) -> None: + _clean_cache_database() @@ -52,6 +24 @@ def parquet_config(set_env_vars: pytest.MonkeyPatch, set_supported_datasets: pyt -def get_worker( - dataset: str, - app_config: AppConfig, - parquet_config: ParquetConfig, - force: bool = False, -) -> ParquetWorker: +def get_worker(dataset: str, app_config: AppConfig, force: bool = False) -> ParquetWorker: @@ -68 +34,0 @@ def get_worker( - parquet_config=parquet_config, @@ -72,22 +37,0 @@ def get_worker( -def test_compute(app_config: AppConfig, parquet_config: ParquetConfig, hub_datasets: HubDatasets) -> None: - dataset = hub_datasets["public"]["name"] - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - assert worker.process() is True - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) - assert cached_response["http_status"] == HTTPStatus.OK - assert cached_response["error_code"] is None - assert cached_response["worker_version"] == worker.get_version() - assert cached_response["dataset_git_revision"] is not None - content = cached_response["content"] - assert len(content["parquet_files"]) == 1 - assert content == hub_datasets["public"]["parquet_response"] - - -def test_doesnotexist(app_config: AppConfig, parquet_config: ParquetConfig) -> None: - dataset = "doesnotexist" - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - assert worker.process() is False - with pytest.raises(DoesNotExist): - get_response(kind=worker.processing_step.cache_kind, dataset=dataset) - - @@ -95 +39 @@ def test_doesnotexist(app_config: AppConfig, parquet_config: ParquetConfig) -> N - "dataset,blocked,raises", + "dataset,upstream_status,upstream_content,expected_error_code,expected_content,should_raise", @@ -97,4 +41,17 @@ def test_doesnotexist(app_config: AppConfig, parquet_config: ParquetConfig) -> N - ("public", ["public"], True), - ("public", ["public", "audio"], True), - ("public", ["audio"], False), - ("public", [], False), + ( + "ok", + HTTPStatus.OK, + {"parquet_files": [{"key": "value"}], "dataset_info": {"key": "value"}}, + None, + {"parquet_files": [{"key": "value"}]}, + False, + ), + ("status_error", HTTPStatus.NOT_FOUND, {"error": "error"}, PreviousStepStatusError.__name__, None, True), + ( + "format_error", + HTTPStatus.OK, + {"not_parquet_files": "wrong_format"}, + PreviousStepFormatError.__name__, + None, + True, + ), @@ -103,14 +60,8 @@ def test_doesnotexist(app_config: AppConfig, parquet_config: ParquetConfig) -> N -def test_raise_if_blocked(dataset: str, blocked: List[str], raises: bool) -> None: - if raises: - with pytest.raises(DatasetInBlockListError): - raise_if_blocked(dataset=dataset, 
blocked_datasets=blocked) - else: - raise_if_blocked(dataset=dataset, blocked_datasets=blocked) - - [email protected]( - "name,raises", - [("public", False), ("big", True)], -) -def test_raise_if_too_big_from_hub( - hub_datasets: HubDatasets, name: str, raises: bool, app_config: AppConfig, parquet_config: ParquetConfig +def test_compute( + app_config: AppConfig, + dataset: str, + upstream_status: HTTPStatus, + upstream_content: Any, + expected_error_code: str, + expected_content: Any, + should_raise: bool, @@ -118,6 +69,2 @@ def test_raise_if_too_big_from_hub( - dataset = hub_datasets[name]["name"] - dataset_info = get_dataset_info_or_raise( - dataset=dataset, - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - revision="main", + upsert_response( + kind="/parquet-and-dataset-info", dataset=dataset, content=upstream_content, http_status=upstream_status @@ -125,56 +72,5 @@ def test_raise_if_too_big_from_hub( - if raises: - with pytest.raises(DatasetTooBigFromHubError): - raise_if_too_big_from_hub(dataset_info=dataset_info, max_dataset_size=parquet_config.max_dataset_size) - else: - raise_if_too_big_from_hub(dataset_info=dataset_info, max_dataset_size=parquet_config.max_dataset_size) - - [email protected]( - "name,raises", - [("public", False), ("big", True)], -) -def test_raise_if_too_big_from_datasets( - hub_datasets: HubDatasets, name: str, raises: bool, app_config: AppConfig, parquet_config: ParquetConfig -) -> None: - dataset = hub_datasets[name]["name"] - if raises: - with pytest.raises(DatasetTooBigFromDatasetsError): - raise_if_too_big_from_datasets( - dataset=dataset, - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - revision="main", - max_dataset_size=parquet_config.max_dataset_size, - ) - else: - raise_if_too_big_from_datasets( - dataset=dataset, - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - revision="main", - max_dataset_size=parquet_config.max_dataset_size, - ) - - [email protected]( - "in_list,raises", - [ - (True, False), - (False, True), - ], -) -def test_raise_if_not_supported( - hub_public_big: str, app_config: AppConfig, parquet_config: ParquetConfig, in_list: bool, raises: bool -) -> None: - if raises: - with pytest.raises(DatasetTooBigFromDatasetsError): - raise_if_not_supported( - dataset=hub_public_big, - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - committer_hf_token=parquet_config.committer_hf_token, - revision="main", - max_dataset_size=parquet_config.max_dataset_size, - supported_datasets=[hub_public_big] if in_list else ["another_dataset"], - blocked_datasets=[], - ) + worker = get_worker(dataset=dataset, app_config=app_config) + if should_raise: + with pytest.raises(Exception) as e: + worker.compute() + assert e.type.__name__ == expected_error_code @@ -182,10 +78 @@ def test_raise_if_not_supported( - raise_if_not_supported( - dataset=hub_public_big, - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - committer_hf_token=parquet_config.committer_hf_token, - revision="main", - max_dataset_size=parquet_config.max_dataset_size, - supported_datasets=[hub_public_big] if in_list else ["another_dataset"], - blocked_datasets=[], - ) + assert worker.compute() == expected_content @@ -194,93 +81,4 @@ def test_raise_if_not_supported( -def test_not_supported_if_big(app_config: AppConfig, parquet_config: ParquetConfig, hub_public_big: str) -> None: - # Not in the list of supported datasets and bigger 
than the maximum size - dataset = hub_public_big - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - assert worker.process() is False - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) - assert cached_response["http_status"] == HTTPStatus.NOT_IMPLEMENTED - assert cached_response["error_code"] == "DatasetTooBigFromDatasetsError" - - -def test_supported_if_gated(app_config: AppConfig, parquet_config: ParquetConfig, hub_gated_csv: str) -> None: - # Access should must be granted - dataset = hub_gated_csv - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - assert worker.process() is True - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) - assert cached_response["http_status"] == HTTPStatus.OK - assert cached_response["error_code"] is None - - -def test_not_supported_if_gated_with_extra_fields( - app_config: AppConfig, parquet_config: ParquetConfig, hub_gated_extra_fields_csv: str -) -> None: - # Access request should fail because extra fields in gated datasets are not supported - dataset = hub_gated_extra_fields_csv - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - assert worker.process() is False - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) - assert cached_response["http_status"] == HTTPStatus.NOT_FOUND - assert cached_response["error_code"] == "GatedExtraFieldsError" - - -def test_blocked(app_config: AppConfig, parquet_config: ParquetConfig, hub_public_jsonl: str) -> None: - # In the list of blocked datasets - dataset = hub_public_jsonl - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - assert worker.process() is False - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) - assert cached_response["http_status"] == HTTPStatus.NOT_IMPLEMENTED - assert cached_response["error_code"] == "DatasetInBlockListError" - - [email protected]( - "name", - ["public", "audio", "gated"], -) -def test_compute_splits_response_simple_csv_ok( - hub_datasets: HubDatasets, name: str, app_config: AppConfig, parquet_config: ParquetConfig, data_df: pd.DataFrame -) -> None: - dataset = hub_datasets[name]["name"] - expected_parquet_response = hub_datasets[name]["parquet_response"] - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - result = worker.compute() - assert result == expected_parquet_response - - # download the parquet file and check that it is valid - if name == "audio": - return - - if name == "public": - df = pd.read_parquet(result["parquet_files"][0]["url"], engine="auto") - else: - # in all these cases, the parquet files are not accessible without a token - with pytest.raises(Exception): - pd.read_parquet(result["parquet_files"][0]["url"], engine="auto") - r = requests.get( - result["parquet_files"][0]["url"], headers={"Authorization": f"Bearer {app_config.common.hf_token}"} - ) - assert r.status_code == HTTPStatus.OK, r.text - df = pd.read_parquet(io.BytesIO(r.content), engine="auto") - assert df.equals(data_df), df - - [email protected]( - "name,error_code,cause", - [ - ("empty", "EmptyDatasetError", "EmptyDatasetError"), - ("does_not_exist", "DatasetNotFoundError", None), - ("gated_extra_fields", "GatedExtraFieldsError", None), - ("private", "DatasetNotFoundError", None), - ], -) -def test_compute_splits_response_simple_csv_error( - 
hub_datasets: HubDatasets, - name: str, - error_code: str, - cause: str, - app_config: AppConfig, - parquet_config: ParquetConfig, -) -> None: - dataset = hub_datasets[name]["name"] - worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) - with pytest.raises(CustomError) as exc_info: +def test_doesnotexist(app_config: AppConfig) -> None: + dataset = "doesnotexist" + worker = get_worker(dataset=dataset, app_config=app_config) + with pytest.raises(DatasetNotFoundError): @@ -288,31 +85,0 @@ def test_compute_splits_response_simple_csv_error( - assert exc_info.value.code == error_code - if cause is None: - assert exc_info.value.disclose_cause is False - assert exc_info.value.cause_exception is None - else: - assert exc_info.value.disclose_cause is True - assert exc_info.value.cause_exception == cause - response = exc_info.value.as_response() - assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} - response_dict = dict(response) - # ^ to remove mypy warnings - assert response_dict["cause_exception"] == cause - assert isinstance(response_dict["cause_traceback"], list) - assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" - - [email protected]( - "filename,split,config,raises", - [ - ("config/builder-split.parquet", "split", "config", False), - ("config/builder-split-00000-of-00001.parquet", "split", "config", False), - ("builder-split-00000-of-00001.parquet", "split", "config", True), - ("config/builder-not-supported.parquet", "not-supported", "config", True), - ], -) -def test_parse_repo_filename(filename: str, split: str, config: str, raises: bool) -> None: - if raises: - with pytest.raises(Exception): - parse_repo_filename(filename) - else: - assert parse_repo_filename(filename) == (config, split) diff --git a/workers/datasets_based/tests/workers/test_parquet_and_dataset_info.py b/workers/datasets_based/tests/workers/test_parquet_and_dataset_info.py new file mode 100644 index 00000000..e7239bda --- /dev/null +++ b/workers/datasets_based/tests/workers/test_parquet_and_dataset_info.py @@ -0,0 +1,383 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import io +from http import HTTPStatus +from typing import Any, Iterator, List + +import pandas as pd +import pytest +import requests +from libcommon.exceptions import CustomError +from libcommon.simple_cache import DoesNotExist, get_response + +from datasets_based.config import AppConfig, ParquetAndDatasetInfoConfig +from datasets_based.workers.parquet_and_dataset_info import ( + DatasetInBlockListError, + DatasetTooBigFromDatasetsError, + DatasetTooBigFromHubError, + ParquetAndDatasetInfoWorker, + get_dataset_info_or_raise, + parse_repo_filename, + raise_if_blocked, + raise_if_not_supported, + raise_if_too_big_from_datasets, + raise_if_too_big_from_hub, +) + +from ..fixtures.hub import HubDatasets + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 [email protected](scope="module", autouse=True) +def set_supported_datasets(hub_datasets: HubDatasets) -> Iterator[pytest.MonkeyPatch]: + mp = pytest.MonkeyPatch() + mp.setenv( + "PARQUET_AND_DATASET_INFO_BLOCKED_DATASETS", + ",".join(value["name"] for value in hub_datasets.values() if "jsonl" in value["name"]), + ) + mp.setenv( + "PARQUET_AND_DATASET_INFO_SUPPORTED_DATASETS", + ",".join(value["name"] for value in hub_datasets.values() if "big" not in value["name"]), + ) + yield mp + mp.undo() + + [email protected] +def parquet_and_dataset_info_config( + set_env_vars: pytest.MonkeyPatch, set_supported_datasets: pytest.MonkeyPatch +) -> ParquetAndDatasetInfoConfig: + return ParquetAndDatasetInfoConfig.from_env() + + +def get_worker( + dataset: str, + app_config: AppConfig, + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, + force: bool = False, +) -> ParquetAndDatasetInfoWorker: + return ParquetAndDatasetInfoWorker( + job_info={ + "type": ParquetAndDatasetInfoWorker.get_job_type(), + "dataset": dataset, + "config": None, + "split": None, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + parquet_and_dataset_info_config=parquet_and_dataset_info_config, + ) + + +def assert_content_is_equal(content: Any, expected: Any) -> None: + print(content) + assert set(content.keys()) == {"parquet_files", "dataset_info"}, content + assert content["parquet_files"] == expected["parquet_files"], content + assert len(content["dataset_info"]) == 1, content + content_value = list(content["dataset_info"].values())[0] + expected_value = list(expected["dataset_info"].values())[0] + assert set(content_value.keys()) == set(expected_value.keys()), content + for key in content_value.keys(): + if key != "download_checksums": + assert content_value[key] == expected_value[key], content + assert len(content_value["download_checksums"]) == 1, content + content_checksum = list(content_value["download_checksums"].values())[0] + expected_checksum = list(expected_value["download_checksums"].values())[0] + assert content_checksum == expected_checksum, content + + +def test_compute( + app_config: AppConfig, parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, hub_datasets: HubDatasets +) -> None: + dataset = hub_datasets["public"]["name"] + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + assert worker.process() is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + assert cached_response["worker_version"] == worker.get_version() + assert cached_response["dataset_git_revision"] 
is not None + content = cached_response["content"] + assert len(content["parquet_files"]) == 1 + assert_content_is_equal(content, hub_datasets["public"]["parquet_and_dataset_info_response"]) + + +def test_doesnotexist(app_config: AppConfig, parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig) -> None: + dataset = "doesnotexist" + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + assert worker.process() is False + with pytest.raises(DoesNotExist): + get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + + [email protected]( + "dataset,blocked,raises", + [ + ("public", ["public"], True), + ("public", ["public", "audio"], True), + ("public", ["audio"], False), + ("public", [], False), + ], +) +def test_raise_if_blocked(dataset: str, blocked: List[str], raises: bool) -> None: + if raises: + with pytest.raises(DatasetInBlockListError): + raise_if_blocked(dataset=dataset, blocked_datasets=blocked) + else: + raise_if_blocked(dataset=dataset, blocked_datasets=blocked) + + [email protected]( + "name,raises", + [("public", False), ("big", True)], +) +def test_raise_if_too_big_from_hub( + hub_datasets: HubDatasets, + name: str, + raises: bool, + app_config: AppConfig, + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, +) -> None: + dataset = hub_datasets[name]["name"] + dataset_info = get_dataset_info_or_raise( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + revision="main", + ) + if raises: + with pytest.raises(DatasetTooBigFromHubError): + raise_if_too_big_from_hub( + dataset_info=dataset_info, max_dataset_size=parquet_and_dataset_info_config.max_dataset_size + ) + else: + raise_if_too_big_from_hub( + dataset_info=dataset_info, max_dataset_size=parquet_and_dataset_info_config.max_dataset_size + ) + + [email protected]( + "name,raises", + [("public", False), ("big", True)], +) +def test_raise_if_too_big_from_datasets( + hub_datasets: HubDatasets, + name: str, + raises: bool, + app_config: AppConfig, + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, +) -> None: + dataset = hub_datasets[name]["name"] + if raises: + with pytest.raises(DatasetTooBigFromDatasetsError): + raise_if_too_big_from_datasets( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + revision="main", + max_dataset_size=parquet_and_dataset_info_config.max_dataset_size, + ) + else: + raise_if_too_big_from_datasets( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + revision="main", + max_dataset_size=parquet_and_dataset_info_config.max_dataset_size, + ) + + [email protected]( + "in_list,raises", + [ + (True, False), + (False, True), + ], +) +def test_raise_if_not_supported( + hub_public_big: str, + app_config: AppConfig, + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, + in_list: bool, + raises: bool, +) -> None: + if raises: + with pytest.raises(DatasetTooBigFromDatasetsError): + raise_if_not_supported( + dataset=hub_public_big, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + committer_hf_token=parquet_and_dataset_info_config.committer_hf_token, + revision="main", + max_dataset_size=parquet_and_dataset_info_config.max_dataset_size, + supported_datasets=[hub_public_big] if in_list else ["another_dataset"], + blocked_datasets=[], + ) + else: + raise_if_not_supported( + 
dataset=hub_public_big, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + committer_hf_token=parquet_and_dataset_info_config.committer_hf_token, + revision="main", + max_dataset_size=parquet_and_dataset_info_config.max_dataset_size, + supported_datasets=[hub_public_big] if in_list else ["another_dataset"], + blocked_datasets=[], + ) + + +def test_not_supported_if_big( + app_config: AppConfig, parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, hub_public_big: str +) -> None: + # Not in the list of supported datasets and bigger than the maximum size + dataset = hub_public_big + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + assert worker.process() is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + assert cached_response["http_status"] == HTTPStatus.NOT_IMPLEMENTED + assert cached_response["error_code"] == "DatasetTooBigFromDatasetsError" + + +def test_supported_if_gated( + app_config: AppConfig, parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, hub_gated_csv: str +) -> None: + # Access should be granted + dataset = hub_gated_csv + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + assert worker.process() is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + + +def test_not_supported_if_gated_with_extra_fields( + app_config: AppConfig, + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, + hub_gated_extra_fields_csv: str, +) -> None: + # Access request should fail because extra fields in gated datasets are not supported + dataset = hub_gated_extra_fields_csv + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + assert worker.process() is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + assert cached_response["http_status"] == HTTPStatus.NOT_FOUND + assert cached_response["error_code"] == "GatedExtraFieldsError" + + +def test_blocked( + app_config: AppConfig, parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, hub_public_jsonl: str +) -> None: + # In the list of blocked datasets + dataset = hub_public_jsonl + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + assert worker.process() is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + assert cached_response["http_status"] == HTTPStatus.NOT_IMPLEMENTED + assert cached_response["error_code"] == "DatasetInBlockListError" + + [email protected]( + "name", + ["public", "audio", "gated"], +) +def test_compute_splits_response_simple_csv_ok( + hub_datasets: HubDatasets, + name: str, + app_config: AppConfig, + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, + data_df: pd.DataFrame, +) -> None: + dataset = hub_datasets[name]["name"] + expected_parquet_and_dataset_info_response = hub_datasets[name]["parquet_and_dataset_info_response"] + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + result = worker.compute() + assert_content_is_equal(result,
expected_parquet_and_dataset_info_response) + + # download the parquet file and check that it is valid + if name == "audio": + return + + if name == "public": + df = pd.read_parquet(result["parquet_files"][0]["url"], engine="auto") + else: + # in all these cases, the parquet files are not accessible without a token + with pytest.raises(Exception): + pd.read_parquet(result["parquet_files"][0]["url"], engine="auto") + r = requests.get( + result["parquet_files"][0]["url"], headers={"Authorization": f"Bearer {app_config.common.hf_token}"} + ) + assert r.status_code == HTTPStatus.OK, r.text + df = pd.read_parquet(io.BytesIO(r.content), engine="auto") + assert df.equals(data_df), df + + [email protected]( + "name,error_code,cause", + [ + ("empty", "EmptyDatasetError", "EmptyDatasetError"), + ("does_not_exist", "DatasetNotFoundError", None), + ("gated_extra_fields", "GatedExtraFieldsError", None), + ("private", "DatasetNotFoundError", None), + ], +) +def test_compute_splits_response_simple_csv_error( + hub_datasets: HubDatasets, + name: str, + error_code: str, + cause: str, + app_config: AppConfig, + parquet_and_dataset_info_config: ParquetAndDatasetInfoConfig, +) -> None: + dataset = hub_datasets[name]["name"] + worker = get_worker( + dataset=dataset, app_config=app_config, parquet_and_dataset_info_config=parquet_and_dataset_info_config + ) + with pytest.raises(CustomError) as exc_info: + worker.compute() + assert exc_info.value.code == error_code + if cause is None: + assert exc_info.value.disclose_cause is False + assert exc_info.value.cause_exception is None + else: + assert exc_info.value.disclose_cause is True + assert exc_info.value.cause_exception == cause + response = exc_info.value.as_response() + assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} + response_dict = dict(response) + # ^ to remove mypy warnings + assert response_dict["cause_exception"] == cause + assert isinstance(response_dict["cause_traceback"], list) + assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" + + [email protected]( + "filename,split,config,raises", + [ + ("config/builder-split.parquet", "split", "config", False), + ("config/builder-split-00000-of-00001.parquet", "split", "config", False), + ("builder-split-00000-of-00001.parquet", "split", "config", True), + ("config/builder-not-supported.parquet", "not-supported", "config", True), + ], +) +def test_parse_repo_filename(filename: str, split: str, config: str, raises: bool) -> None: + if raises: + with pytest.raises(Exception): + parse_repo_filename(filename) + else: + assert parse_repo_filename(filename) == (config, split) diff --git a/workers/datasets_based/tests/workers/test_splits.py b/workers/datasets_based/tests/workers/test_splits.py index 919c6db4..bcfca473 100644 --- a/workers/datasets_based/tests/workers/test_splits.py +++ b/workers/datasets_based/tests/workers/test_splits.py @@ -58,2 +58 @@ def test_process(app_config: AppConfig, hub_public_csv: str) -> None: - assert content["splits"][0]["num_bytes"] is None - assert content["splits"][0]["num_examples"] is None + assert "stats" not in content["splits"][0]
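The commit-building step in the diff above (deciding which CommitOperationDelete and CommitOperationAdd entries to send) reduces to set arithmetic over repo filenames. A minimal standalone sketch of just that step, with no Hub access; the helper name plan_files_to_delete is hypothetical, since the worker inlines this logic:

from typing import Dict, List, Set

def plan_files_to_delete(previous_files: Set[str], files_to_add: Dict[str, str]) -> List[str]:
    # Keep the files about to be overwritten and .gitattributes; every other file
    # previously on the target branch becomes a delete operation in the commit.
    return sorted(previous_files - set(files_to_add).union({".gitattributes"}))

# Usage: one stale shard gets deleted, the shard being re-uploaded is kept.
previous = {".gitattributes", "config/csv-train.parquet", "config/old-shard.parquet"}
to_add = {"config/csv-train.parquet": "/cache/config/csv-train.parquet"}
assert plan_files_to_delete(previous, to_add) == ["config/old-shard.parquet"]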
30b508c0c5d3e7c35885479a05613ddf0684d202
Sylvain Lesage
2023-01-16T12:53:53
Create children in generic worker (#677)
diff --git a/libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl new file mode 100644 index 00000000..7f2e2966 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.1.tar.gz b/libs/libcommon/dist/libcommon-0.6.1.tar.gz new file mode 100644 index 00000000..c6ce3880 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.1.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index cb2228fa..7da0cfe0 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.6.0" +version = "0.6.1" diff --git a/libs/libcommon/src/libcommon/simple_cache.py b/libs/libcommon/src/libcommon/simple_cache.py index 8da700d8..09723ee0 100644 --- a/libs/libcommon/src/libcommon/simple_cache.py +++ b/libs/libcommon/src/libcommon/simple_cache.py @@ -7 +7,12 @@ from http import HTTPStatus -from typing import Any, Generic, List, Mapping, Optional, Set, Type, TypedDict, TypeVar +from typing import ( + Any, + Generic, + List, + Mapping, + NamedTuple, + Optional, + Set, + Type, + TypedDict, + TypeVar, +) @@ -48,0 +60,8 @@ def get_datetime() -> datetime: +class SplitFullName(NamedTuple): + """A split full name is a tuple of (dataset, config, split).""" + + dataset: str + config: Optional[str] + split: Optional[str] + + @@ -183,17 +202,5 @@ def get_response(kind: str, dataset: str, config: Optional[str] = None, split: O -class ResponseId(TypedDict): - kind: str - dataset: str - config: Optional[str] - split: Optional[str] - - -def get_dataset_response_ids(dataset: str) -> List[ResponseId]: - return [ - { - "kind": response.kind, - "dataset": response.dataset, - "config": response.config, - "split": response.split, - } - for response in CachedResponse.objects(dataset=dataset).only("kind", "dataset", "config", "split") - ] +def get_split_full_names_for_dataset_and_kind(dataset: str, kind: str) -> set[SplitFullName]: + return { + SplitFullName(dataset=response.dataset, config=response.config, split=response.split) + for response in CachedResponse.objects(dataset=dataset, kind=kind).only("dataset", "config", "split") + } diff --git a/libs/libcommon/src/libcommon/worker.py b/libs/libcommon/src/libcommon/worker.py index 595532dd..1083c047 100644 --- a/libs/libcommon/src/libcommon/worker.py +++ b/libs/libcommon/src/libcommon/worker.py @@ -15,2 +15,8 @@ from libcommon.processing_graph import ProcessingStep -from libcommon.queue import JobInfo, Status -from libcommon.simple_cache import get_response_without_content, upsert_response +from libcommon.queue import JobInfo, Queue, Status +from libcommon.simple_cache import ( + SplitFullName, + delete_response, + get_response_without_content, + get_split_full_names_for_dataset_and_kind, + upsert_response, +) @@ -264,0 +271 @@ class Worker(ABC): + self.create_children_jobs(self.get_new_splits(content)) @@ -318,0 +326,50 @@ class Worker(ABC): + # should be overridden if the job has children jobs of type "split" + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + """Get the set of new splits, from the content created by the compute. + + Can be empty.""" + return set() + + def create_children_jobs(self, new_split_full_names: set[SplitFullName]) -> None: + """Create children jobs for the current job. + + Args: + new_split_full_names (:obj:`set[SplitFullName]`): the set of new splits, from the content created by the + compute. 
Can be empty. + """ + for processing_step in self.processing_step.children: + if processing_step.input_type == "dataset": + Queue(type=processing_step.job_type).add_job( + dataset=self.dataset, config=None, split=None, force=self.force + ) + elif processing_step.input_type == "split": + # remove obsolete responses from the cache + split_full_names_in_cache = get_split_full_names_for_dataset_and_kind( + dataset=self.dataset, kind=processing_step.cache_kind + ) + split_full_names_to_delete = split_full_names_in_cache.difference(new_split_full_names) + for split_full_name in split_full_names_to_delete: + delete_response( + kind=processing_step.cache_kind, + dataset=split_full_name.dataset, + config=split_full_name.config, + split=split_full_name.split, + ) + logging.debug( + f"{len(split_full_names_to_delete)} {processing_step.endpoint} responses deleted from the cache" + f" for obsolete splits of dataset={self.dataset}" + ) + # compute the responses for the new splits + for split_full_name in new_split_full_names: + # we force the refresh of the children step responses if the current step refresh was forced + Queue(type=processing_step.job_type).add_job( + dataset=split_full_name.dataset, + config=split_full_name.config, + split=split_full_name.split, + force=self.force, + ) + logging.debug( + f"{len(new_split_full_names)} {processing_step.job_type} jobs added for the splits of" + f" dataset={self.dataset}" + ) + diff --git a/libs/libcommon/tests/test_simple_cache.py b/libs/libcommon/tests/test_simple_cache.py index afa12c23..012d6326 100644 --- a/libs/libcommon/tests/test_simple_cache.py +++ b/libs/libcommon/tests/test_simple_cache.py @@ -16,0 +17 @@ from libcommon.simple_cache import ( + SplitFullName, @@ -21 +21,0 @@ from libcommon.simple_cache import ( - get_dataset_response_ids, @@ -24,0 +25 @@ from libcommon.simple_cache import ( + get_split_full_names_for_dataset_and_kind, @@ -207 +208 @@ def test_big_row() -> None: -def test_get_dataset_response_ids() -> None: +def test_get_split_full_names_for_dataset_and_kind() -> None: @@ -228,7 +229,2 @@ def test_get_dataset_response_ids() -> None: - result = get_dataset_response_ids(dataset=dataset_a) - expected = [ - {"kind": kind_a, "dataset": dataset_a, "config": None, "split": None}, - {"kind": kind_b, "dataset": dataset_a, "config": config_a, "split": split_a}, - {"kind": kind_b, "dataset": dataset_a, "config": config_b, "split": split_b}, - {"kind": kind_b, "dataset": dataset_a, "config": config_b, "split": split_a}, - ] + result = get_split_full_names_for_dataset_and_kind(dataset=dataset_a, kind=kind_a) + expected = {SplitFullName(dataset_a, None, None)} @@ -237,4 +233,12 @@ def test_get_dataset_response_ids() -> None: - assert get_dataset_response_ids(dataset=dataset_b) == [ - {"kind": kind_a, "dataset": dataset_b, "config": None, "split": None} - ] - assert get_dataset_response_ids(dataset=dataset_c) == [] + result = get_split_full_names_for_dataset_and_kind(dataset=dataset_a, kind=kind_b) + expected = { + SplitFullName(dataset_a, config_a, split_a), + SplitFullName(dataset_a, config_b, split_b), + SplitFullName(dataset_a, config_b, split_a), + } + assert len(result) == len(expected) and all(x in expected for x in result) + # ^ compare the contents of the lists without caring about the order + assert get_split_full_names_for_dataset_and_kind(dataset=dataset_b, kind=kind_a) == { + SplitFullName(dataset_b, None, None) + } + assert get_split_full_names_for_dataset_and_kind(dataset=dataset_c, kind=kind_a) == set() diff --git 
a/libs/libcommon/tests/test_worker.py b/libs/libcommon/tests/test_worker.py index f5111f33..9fed5fdb 100644 --- a/libs/libcommon/tests/test_worker.py +++ b/libs/libcommon/tests/test_worker.py @@ -6,3 +6,3 @@ from libcommon.config import CommonConfig -from libcommon.processing_graph import ProcessingStep -from libcommon.queue import _clean_queue_database -from libcommon.simple_cache import _clean_cache_database +from libcommon.processing_graph import ProcessingGraph, ProcessingStep +from libcommon.queue import Queue, Status, _clean_queue_database +from libcommon.simple_cache import SplitFullName, _clean_cache_database @@ -33,0 +34,3 @@ class DummyWorker(Worker): + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + return {SplitFullName(self.dataset, "config", "split1"), SplitFullName(self.dataset, "config", "split2")} + @@ -162,0 +166,38 @@ def test_check_type( + + +def test_create_children_jobs() -> None: + graph = ProcessingGraph( + { + "/dummy": {"input_type": "dataset"}, + "/child-dataset": {"input_type": "dataset", "requires": "/dummy"}, + "/child-split": {"input_type": "split", "requires": "/dummy"}, + } + ) + root_step = graph.get_step("/dummy") + worker = DummyWorker( + job_info={ + "job_id": "job_id", + "type": root_step.job_type, + "dataset": "dataset", + "config": None, + "split": None, + "force": False, + }, + processing_step=root_step, + common_config=CommonConfig(), + ) + assert worker.should_skip_job() is False + # we add an entry to the cache + worker.process() + assert worker.should_skip_job() is True + # check that the children jobs have been created + child_dataset_jobs = Queue(type="/child-dataset").get_dump_with_status(status=Status.WAITING) + assert len(child_dataset_jobs) == 1 + assert child_dataset_jobs[0]["dataset"] == "dataset" + assert child_dataset_jobs[0]["config"] is None + assert child_dataset_jobs[0]["split"] is None + child_split_jobs = Queue(type="/child-split").get_dump_with_status(status=Status.WAITING) + assert len(child_split_jobs) == 2 + assert all(job["dataset"] == "dataset" and job["config"] == "config" for job in child_split_jobs) + # we don't know the order + assert {child_split_jobs[0]["split"], child_split_jobs[1]["split"]} == {"split1", "split2"} diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index 08cc79e0..d517cf1d 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -891 +891 @@ name = "libcommon" -version = "0.6.0" +version = "0.6.1" @@ -909 +909 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl" @@ -2460 +2460 @@ python-versions = "3.9.15" -content-hash = "9732f429697818b1b3b3dca094f90c50eb148d747a00c0f43ee8bd9255bdbf39" +content-hash = "b596bc1e1812eeab2eade45c68f8f0d985404600bb94cb2b14012c8f9f0ae84e" @@ -3367 +3367 @@ libcommon = [ - {file = "libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, + {file = "libcommon-0.6.1-py3-none-any.whl", hash = "sha256:e62070144ec77422c60b915c351f15c06a27ebe0fd30024ebac7f4f35d250454"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 0a7a9eb7..0b296faa 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } +libcommon = { 
path = "../../libs/libcommon/dist/libcommon-0.6.1-py3-none-any.whl", develop = false } diff --git a/workers/datasets_based/src/datasets_based/workers/first_rows.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py index 4811a9d2..170f6e64 100644 --- a/workers/datasets_based/src/datasets_based/workers/first_rows.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -21,0 +22 @@ from libcommon.exceptions import CustomError +from libcommon.simple_cache import SplitFullName as _SplitFullName @@ -580,0 +582,6 @@ class FirstRowsWorker(DatasetsBasedWorker): + + def get_new_splits(self, _: Mapping[str, Any]) -> set[_SplitFullName]: + """Get the set of new splits, from the content created by the compute.""" + if self.config is None or self.split is None: + raise ValueError("config and split are required") + return {_SplitFullName(dataset=self.dataset, config=self.config, split=self.split)} diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index d5e22df4..e577d712 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -26,0 +27 @@ from libcommon.exceptions import CustomError +from libcommon.simple_cache import SplitFullName @@ -597,0 +599,7 @@ class ParquetWorker(DatasetsBasedWorker): + + def get_new_splits(self, content: Mapping[str, Any]) -> set[SplitFullName]: + """Get the set of new splits, from the content created by the compute.""" + return { + SplitFullName(dataset=parquet_file["dataset"], config=parquet_file["config"], split=parquet_file["split"]) + for parquet_file in content["parquet_files"] + } diff --git a/workers/datasets_based/src/datasets_based/workers/splits.py b/workers/datasets_based/src/datasets_based/workers/splits.py index 9cbb0187..81fa18cd 100644 --- a/workers/datasets_based/src/datasets_based/workers/splits.py +++ b/workers/datasets_based/src/datasets_based/workers/splits.py @@ -16,2 +16 @@ from libcommon.exceptions import CustomError -from libcommon.queue import Queue -from libcommon.simple_cache import delete_response, get_dataset_response_ids +from libcommon.simple_cache import SplitFullName as _SplitFullName @@ -88 +87 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, - {"dataset": dataset, "config": config, "split": split} + {"dataset": dataset, "config": str(config), "split": str(split)} @@ -174 +173 @@ class SplitsWorker(DatasetsBasedWorker): - content = compute_splits_response(dataset=self.dataset, hf_token=self.common_config.hf_token) + return compute_splits_response(dataset=self.dataset, hf_token=self.common_config.hf_token) @@ -176,27 +175,3 @@ class SplitsWorker(DatasetsBasedWorker): - new_splits = [(s["dataset"], s["config"], s["split"]) for s in content["splits"]] - for step in self.processing_step.children: - if step.input_type == "dataset": - Queue(type=step.job_type).add_job( - dataset=self.dataset, config=self.config, split=self.split, force=self.force - ) - else: - # remove obsolete responses from the cache - responses_in_cache = [ - (s["dataset"], s["config"], s["split"]) - for s in get_dataset_response_ids(dataset=self.dataset) - if s["kind"] == step.cache_kind - ] - responses_to_delete = [s for s in responses_in_cache if s not in new_splits] - for d, c, s in responses_to_delete: - delete_response(kind=step.cache_kind, dataset=d, config=c, split=s) - logging.debug( - f"{len(responses_to_delete)} {step.endpoint} 
responses deleted from the cache for obsolete" - f" splits of dataset={self.dataset}" - ) - # compute the responses for the new splits - for d, c, s in new_splits: - # we force the refresh of the /first_rows responses if the /splits refresh was forced - Queue(type=step.job_type).add_job(dataset=d, config=c, split=s, force=self.force) - logging.debug(f"{len(new_splits)} {step.job_type} jobs added for the splits of dataset={self.dataset}") - - return content + def get_new_splits(self, content: Mapping[str, Any]) -> set[_SplitFullName]: + """Get the set of new splits, from the content created by the compute.""" + return {_SplitFullName(dataset=s["dataset"], config=s["config"], split=s["split"]) for s in content["splits"]}
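
The get_new_splits hook introduced here lets the framework, rather than each worker, compare the splits produced by compute with what is already cached, so obsolete responses can be deleted and children jobs created generically (replacing the cache-pruning loop removed from SplitsWorker.compute in this diff). A minimal sketch of that set-difference idea, using a simplified stand-in for libcommon's SplitFullName:

from dataclasses import dataclass

@dataclass(frozen=True)
class SplitFullName:
    # simplified stand-in for libcommon.simple_cache.SplitFullName
    dataset: str
    config: str
    split: str

def obsolete_splits(cached: set[SplitFullName], new: set[SplitFullName]) -> set[SplitFullName]:
    # responses cached for splits that no longer exist must be deleted
    return cached - new

cached = {SplitFullName("ds", "config", "split1"), SplitFullName("ds", "config", "old")}
new = {SplitFullName("ds", "config", "split1"), SplitFullName("ds", "config", "split2")}
assert obsolete_splits(cached, new) == {SplitFullName("ds", "config", "old")}
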
7c67cf1839908e4f55219dfc11e602bd4eb2038e
Sylvain Lesage
2023-01-16T12:30:02
ci: 🎡 fix app token (#678)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index 93f019c3..441cb039 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -62 +62 @@ jobs: - COMMON_HF_TOKEN: "hf_datasets-server_token" + COMMON_HF_TOKEN: "hf_app_datasets-server_token" @@ -82 +82 @@ jobs: - COMMON_HF_TOKEN: "hf_datasets-server_token" + COMMON_HF_TOKEN: "hf_app_datasets-server_token" diff --git a/e2e/Makefile b/e2e/Makefile index 5c8cafc2..3afd578e 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -7 +7 @@ export COMMON_HF_ENDPOINT := https://hub-ci.huggingface.co -export COMMON_HF_TOKEN := hf_datasets-server_token +export COMMON_HF_TOKEN := hf_app_datasets-server_token diff --git a/workers/datasets_based/tests/constants.py b/workers/datasets_based/tests/constants.py index d30c6ca6..66dbb47a 100644 --- a/workers/datasets_based/tests/constants.py +++ b/workers/datasets_based/tests/constants.py @@ -5 +5 @@ -CI_APP_TOKEN = "hf_datasets-server_token" +CI_APP_TOKEN = "hf_app_datasets-server_token"
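
This commit only renames the CI token value, but for context the services read it through environs under the COMMON_ prefix; a rough sketch of that pattern (the real call site is CommonConfig in libcommon.config):

from environs import Env

env = Env(expand_vars=True)
with env.prefixed("COMMON_"):
    hf_token = env.str(name="HF_TOKEN", default="")  # nosec
# an empty string means "no token", i.e. anonymous requests to the Hub
hf_token = None if hf_token == "" else hf_token
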
d0ce1acf003f2a17d477b6f3a9def13cdea2f374
Sylvain Lesage
2023-01-11T15:01:11
fix: 🐛 only check webhook payload for what we are interested in (#676)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 4d6b8b2b..fe5ac168 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -9 +9 @@ - "api": "huggingface/datasets-server-services-api:sha-6a36caa" + "api": "huggingface/datasets-server-services-api:sha-a36b651" diff --git a/services/api/src/api/routes/webhook.py b/services/api/src/api/routes/webhook.py index 114ad5aa..67bdfaff 100644 --- a/services/api/src/api/routes/webhook.py +++ b/services/api/src/api/routes/webhook.py @@ -22 +21,0 @@ schema = { - "movedToAuthorId": {"type": "string"}, @@ -26,4 +24,0 @@ schema = { - "id": {"type": "string"}, - "authorId": {"type": "string"}, - "gitalyUid": {"type": "string"}, - "headSha": {"type": "string"}, @@ -31,5 +25,0 @@ schema = { - "private": {"type": "boolean"}, - "subdomain": {"type": "string"}, - # ^ subdomain is for spaces - "tags": {"type": "array", "items": {"type": "string"}}, - # ^ tags are only sent for models @@ -37 +26,0 @@ schema = { - "url": {"type": "string", "format": "uri"}, @@ -49,2 +37,0 @@ class MoonWebhookV2PayloadRepo(TypedDict): - gitalyUid: str - tags: Optional[List[str]] diff --git a/services/api/tests/routes/test_webhook.py b/services/api/tests/routes/test_webhook.py index 70726f15..c309ee5e 100644 --- a/services/api/tests/routes/test_webhook.py +++ b/services/api/tests/routes/test_webhook.py @@ -24,0 +25,26 @@ from api.routes.webhook import parse_payload + ( + { + "event": "update", + "scope": "repo.content", + "repo": { + "type": "dataset", + "name": "AresEkb/prof_standards_sbert_large_mt_nlu_ru", + "id": "63bab13ae0f4fee16cebf084", + "private": False, + "url": { + "web": "https://huggingface.co/datasets/AresEkb/prof_standards_sbert_large_mt_nlu_ru", + "api": "https://huggingface.co/api/datasets/AresEkb/prof_standards_sbert_large_mt_nlu_ru", + }, + "headSha": "c926e6ce93cbd5a6eaf0895abd48776cc5bae638", + "gitalyUid": "c5afeca93171cfa1f6c138ef683df4a53acffd8c86283ab8e7e338df369d2fff", + "authorId": "6394b8740b746ac6a969bd51", + "tags": [], + }, + "webhook": {"id": "632c22b3df82fca9e3b46154", "version": 2}, + }, + False, + ), + ( + {"event": "update", "repo": {"type": "dataset", "name": "AresEkb/prof_standards_sbert_large_mt_nlu_ru"}}, + False, + ),
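
The narrowed schema works because JSON Schema ignores properties it does not declare, so validating only event, repo.type and repo.name means new or renamed Hub payload fields can no longer break the webhook. A hedged sketch of that behavior (the property list below is illustrative, not the full schema from webhook.py):

from jsonschema import validate

schema = {
    "type": "object",
    "properties": {
        "event": {"type": "string"},
        "repo": {
            "type": "object",
            "properties": {
                "type": {"type": "string", "enum": ["dataset", "model", "space"]},
                "name": {"type": "string"},
            },
        },
    },
    "required": ["event", "repo"],
}

# extra fields such as headSha, gitalyUid or tags are simply ignored
payload = {
    "event": "update",
    "repo": {"type": "dataset", "name": "user/dataset", "headSha": "c926e6c", "tags": []},
}
validate(instance=payload, schema=schema)  # raises jsonschema.ValidationError on mismatch
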
9c9036007094b8cf93b54947b5c41b3accc63bae
Sylvain Lesage
2023-01-05T09:41:35
feat: 🎸 allow more concurrent jobs for the same namespace (#675)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 0f044929..dce0f63c 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -179 +179 @@ splits: - maxJobsPerNamespace: 1 + maxJobsPerNamespace: 4 @@ -222 +222 @@ parquet: - maxJobsPerNamespace: 2 + maxJobsPerNamespace: 4
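
maxJobsPerNamespace caps how many started jobs a single namespace (user or organization) can hold at once, so one owner cannot monopolize the workers; raising it to 4 trades some fairness for throughput. An illustrative sketch of the check, assuming the namespace is the part of the dataset name before the slash:

from typing import List, Optional

def namespace(dataset: str) -> str:
    # "user/dataset" -> "user"; canonical datasets have no slash
    return dataset.split("/")[0]

def can_start(dataset: str, started: List[str], max_jobs_per_namespace: Optional[int]) -> bool:
    if max_jobs_per_namespace is None or max_jobs_per_namespace < 1:
        return True  # no limit configured
    same_ns = sum(namespace(d) == namespace(dataset) for d in started)
    return same_ns < max_jobs_per_namespace

started = ["user/ds1", "user/ds2", "user/ds3", "user/ds4"]
assert can_start("user/ds5", started, max_jobs_per_namespace=4) is False
assert can_start("other/ds", started, max_jobs_per_namespace=4) is True
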
5edcb54ffce0bd9392b221e008306d7feb12dd6a
Sylvain Lesage
2023-01-02T17:21:51
feat: 🎸 update the HF webhook content (#671)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index cc96e554..4d6b8b2b 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-1edecd9" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-6a36caa" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-9a6e46b", - "api": "huggingface/datasets-server-services-api:sha-9a6e46b" + "admin": "huggingface/datasets-server-services-admin:sha-6a36caa", + "api": "huggingface/datasets-server-services-api:sha-6a36caa" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-81a40a2" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-6a36caa" diff --git a/jobs/mongodb_migration/Dockerfile b/jobs/mongodb_migration/Dockerfile index 8257adf9..5947de24 100644 --- a/jobs/mongodb_migration/Dockerfile +++ b/jobs/mongodb_migration/Dockerfile @@ -29,0 +30 @@ COPY jobs/mongodb_migration/src ./src +RUN poetry install diff --git a/services/admin/Dockerfile b/services/admin/Dockerfile index d222d07f..5adbb598 100644 --- a/services/admin/Dockerfile +++ b/services/admin/Dockerfile @@ -29,0 +30 @@ COPY services/admin/src ./src +RUN poetry install diff --git a/services/api/Dockerfile b/services/api/Dockerfile index 4076c0a4..5deb45d3 100644 --- a/services/api/Dockerfile +++ b/services/api/Dockerfile @@ -29,0 +30 @@ COPY services/api/src ./src +RUN poetry install diff --git a/services/api/src/api/routes/webhook.py b/services/api/src/api/routes/webhook.py index 0f25148c..114ad5aa 100644 --- a/services/api/src/api/routes/webhook.py +++ b/services/api/src/api/routes/webhook.py @@ -21,0 +22 @@ schema = { + "movedToAuthorId": {"type": "string"}, @@ -25,2 +26,2 @@ schema = { - "type": {"type": "string", "enum": ["dataset", "model", "space"]}, - "name": {"type": "string"}, + "id": {"type": "string"}, + "authorId": {"type": "string"}, @@ -27,0 +29,5 @@ schema = { + "headSha": {"type": "string"}, + "name": {"type": "string"}, + "private": {"type": "boolean"}, + "subdomain": {"type": "string"}, + # ^ subdomain is for spaces @@ -28,0 +35,3 @@ schema = { + # ^ tags are only sent for models + "type": {"type": "string", "enum": ["dataset", "model", "space"]}, + "url": {"type": "string", "format": "uri"}, diff --git a/workers/datasets_based/Dockerfile b/workers/datasets_based/Dockerfile index d661316f..75e3edab 100644 --- a/workers/datasets_based/Dockerfile +++ b/workers/datasets_based/Dockerfile @@ -32,0 +33 @@ COPY workers/datasets_based/src ./src +RUN poetry install
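
The schema expansion mirrors the richer payload the Hub now sends; a rough TypedDict sketch of the repo object, with total=False since several fields are only present for some repo types (an approximation, not the exact MoonWebhookV2PayloadRepo from webhook.py):

from typing import List, TypedDict

class RepoPayload(TypedDict, total=False):
    # approximate shape; field names follow the schema in this diff
    id: str
    type: str        # "dataset" | "model" | "space"
    name: str
    private: bool
    authorId: str
    gitalyUid: str
    headSha: str
    subdomain: str   # only for spaces
    tags: List[str]  # only sent for models
    url: str
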
9bdd3f0b5d8b38d2cace2d7220522ade17038075
Anthony MOI
2023-01-02T11:30:30
Small tweaks on Helm charts (#649)
diff --git a/chart/.helmignore b/chart/.helmignore index 0e8a0eb3..64aea69c 100644 --- a/chart/.helmignore +++ b/chart/.helmignore @@ -23,0 +24,2 @@ +# Helm envs +env diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 45b612b1..0f044929 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -120,0 +121,3 @@ ingress: + tls: + - hosts: + - "datasets-server.huggingface.co" diff --git a/chart/templates/_envCommon.tpl b/chart/templates/_envCommon.tpl index 185f168e..87f2b807 100644 --- a/chart/templates/_envCommon.tpl +++ b/chart/templates/_envCommon.tpl @@ -12,0 +13,3 @@ + {{- if eq .Values.secrets.appHfToken.secretName "" }} + name: {{ .Release.Name }}-datasets-server-app-token + {{- else }} @@ -13,0 +17 @@ + {{- end }} diff --git a/chart/templates/ingress.yaml b/chart/templates/ingress.yaml index 8b3e9f1c..31547760 100644 --- a/chart/templates/ingress.yaml +++ b/chart/templates/ingress.yaml @@ -9,6 +9 @@ spec: - {{- if .Values.ingress.tls.enabled }} - tls: - - hosts: - - {{ .Values.hostname }} - secretName: {{ .Values.ingress.tls.secretName }} - {{- end}} + tls: {{ toYaml .Values.ingress.tls | nindent 4 }} diff --git a/chart/templates/jobs/mongodb-migration/job.yaml b/chart/templates/jobs/mongodb-migration/job.yaml index 365a798c..d0021fdf 100644 --- a/chart/templates/jobs/mongodb-migration/job.yaml +++ b/chart/templates/jobs/mongodb-migration/job.yaml @@ -3,0 +4 @@ +{{- if .Values.dockerImage.jobs.mongodbMigration }} @@ -26,0 +28 @@ spec: +{{- end}} diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index 16ffba97..af8b6f96 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -7 +7 @@ - imagePullPolicy: IfNotPresent + imagePullPolicy: {{ .Values.docker.pullPolicy }} diff --git a/chart/templates/worker/parquet/_container.tpl b/chart/templates/worker/parquet/_container.tpl index 82b06a2e..1f2d7d1b 100644 --- a/chart/templates/worker/parquet/_container.tpl +++ b/chart/templates/worker/parquet/_container.tpl @@ -7 +7 @@ - imagePullPolicy: IfNotPresent + imagePullPolicy: {{ .Values.docker.pullPolicy }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index b57dccda..1e77431f 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -7 +7 @@ - imagePullPolicy: IfNotPresent + imagePullPolicy: {{ .Values.docker.pullPolicy }} diff --git a/chart/values.yaml b/chart/values.yaml index 44714208..f58427a1 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -12 +12 @@ secrets: - secretName: "datasets-server-hf-token" + secretName: "" @@ -40 +40,4 @@ imagePullSecrets: [] -# overridden by docker-images.yaml (which must be in JSON format!) +docker: + pullPolicy: IfNotPresent + +# overridden by docker-images.yaml (which must be in JSON format!). See Makefile for details. @@ -138,9 +141,2 @@ ingress: - tls: - enabled: false - secretName: "" - annotations: - alb.ingress.kubernetes.io/healthcheck-path: "/healthcheck" - alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80, "HTTPS": 443}]' - alb.ingress.kubernetes.io/scheme: "internet-facing" - alb.ingress.kubernetes.io/group.name: "datasets-server" - kubernetes.io/ingress.class: "alb" + tls: [] + annotations: {}
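
The _envCommon.tpl change adds a fallback: when secrets.appHfToken.secretName is left empty (the new default in values.yaml), the chart points at a secret it names after the release itself. The same branching written out in plain Python for readability (the helper function is hypothetical; names mirror the template):

def app_token_secret_name(release_name: str, secret_name: str) -> str:
    # empty secretName -> use the chart-managed secret for this release
    if secret_name == "":
        return f"{release_name}-datasets-server-app-token"
    return secret_name

assert app_token_secret_name("dev", "") == "dev-datasets-server-app-token"
assert app_token_secret_name("dev", "my-hf-token") == "my-hf-token"
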
e31129ea284d3895fbcee29a146db494d19e4e72
Sylvain Lesage
2022-12-23T13:14:18
chore: 🤖 speed-up docker build (#669)
diff --git a/jobs/mongodb_migration/Dockerfile b/jobs/mongodb_migration/Dockerfile index 301a9861..8257adf9 100644 --- a/jobs/mongodb_migration/Dockerfile +++ b/jobs/mongodb_migration/Dockerfile @@ -24,2 +23,0 @@ WORKDIR /src -COPY libs/libcommon/dist ./libs/libcommon/dist -COPY jobs/mongodb_migration/src ./jobs/mongodb_migration/src @@ -27,0 +26 @@ COPY jobs/mongodb_migration/pyproject.toml ./jobs/mongodb_migration/pyproject.to +COPY libs/libcommon/dist ./libs/libcommon/dist @@ -29,0 +29 @@ RUN poetry install +COPY jobs/mongodb_migration/src ./src diff --git a/services/admin/Dockerfile b/services/admin/Dockerfile index a7b70180..d222d07f 100644 --- a/services/admin/Dockerfile +++ b/services/admin/Dockerfile @@ -24,2 +23,0 @@ WORKDIR /src -COPY libs/libcommon/dist ./libs/libcommon/dist -COPY services/admin/src ./services/admin/src @@ -27,0 +26 @@ COPY services/admin/pyproject.toml ./services/admin/pyproject.toml +COPY libs/libcommon/dist ./libs/libcommon/dist @@ -29,0 +29 @@ RUN poetry install +COPY services/admin/src ./src diff --git a/services/api/Dockerfile b/services/api/Dockerfile index c85b3d65..4076c0a4 100644 --- a/services/api/Dockerfile +++ b/services/api/Dockerfile @@ -24,2 +23,0 @@ WORKDIR /src -COPY libs/libcommon/dist ./libs/libcommon/dist -COPY services/api/src ./services/api/src @@ -27,0 +26 @@ COPY services/api/pyproject.toml ./services/api/pyproject.toml +COPY libs/libcommon/dist ./libs/libcommon/dist @@ -29,0 +29 @@ RUN poetry install +COPY services/api/src ./src diff --git a/workers/datasets_based/Dockerfile b/workers/datasets_based/Dockerfile index bb7850f1..d661316f 100644 --- a/workers/datasets_based/Dockerfile +++ b/workers/datasets_based/Dockerfile @@ -26,2 +26 @@ WORKDIR /src -COPY libs/libcommon/dist ./libs/libcommon/dist -COPY workers/datasets_based/src ./workers/datasets_based/src +COPY workers/datasets_based/vendors ./workers/datasets_based/vendors/ @@ -30 +29 @@ COPY workers/datasets_based/pyproject.toml ./workers/datasets_based/pyproject.to -COPY workers/datasets_based/vendors ./workers/datasets_based/vendors/ +COPY libs/libcommon/dist ./libs/libcommon/dist @@ -32,0 +32 @@ RUN poetry install +COPY workers/datasets_based/src ./src
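
Reordering the Dockerfiles so the lockfiles are copied and poetry install run before the sources means the expensive dependency layer stays cached across source-only changes; only layers at and after the first changed COPY are rebuilt. A toy model of that cache-key behavior, assuming layers are keyed by instruction plus input content (real BuildKit caching is more involved):

import hashlib

def layer_key(instruction: str, inputs: str) -> str:
    # Docker reuses a cached layer when the instruction and its inputs are unchanged
    return hashlib.sha256(f"{instruction}\n{inputs}".encode()).hexdigest()

# the dependency layer only depends on pyproject.toml / poetry.lock
assert layer_key("RUN poetry install", "lock v1") == layer_key("RUN poetry install", "lock v1")
# editing a source file invalidates only the later COPY ./src layer
assert layer_key("COPY src", "print('a')") != layer_key("COPY src", "print('b')")
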
ac28b955b92603d3e47e40ae5a6178a06c7bd70c
Sylvain Lesage
2022-12-23T10:55:03
Split Worker into WorkerLoop, WorkerFactory and Worker (#668)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index d2967925..cc96e554 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-7b4762b" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-1edecd9" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-adde39b", - "api": "huggingface/datasets-server-services-api:sha-7b4762b" + "admin": "huggingface/datasets-server-services-admin:sha-9a6e46b", + "api": "huggingface/datasets-server-services-api:sha-9a6e46b" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-e36bd91" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-81a40a2" diff --git a/chart/templates/_envWorker.tpl b/chart/templates/_envWorker.tpl deleted file mode 100644 index a6045b96..00000000 --- a/chart/templates/_envWorker.tpl +++ /dev/null @@ -1,11 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -{{- define "envWorker" -}} -- name: WORKER_MAX_LOAD_PCT - value: {{ .Values.worker.maxLoadPct | quote }} -- name: WORKER_MAX_MEMORY_PCT - value: {{ .Values.worker.maxMemoryPct | quote }} -- name: WORKER_WORKER_SLEEP_SECONDS - value: {{ .Values.worker.sleepSeconds | quote }} -{{- end -}} diff --git a/chart/templates/_envWorkerLoop.tpl b/chart/templates/_envWorkerLoop.tpl new file mode 100644 index 00000000..e4d922bc --- /dev/null +++ b/chart/templates/_envWorkerLoop.tpl @@ -0,0 +1,13 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "envWorkerLoop" -}} +- name: WORKER_LOOP_MAX_DISK_USAGE_PCT + value: {{ .Values.workerLoop.maxDiskUsagePct | quote }} +- name: WORKER_LOOP_MAX_LOAD_PCT + value: {{ .Values.workerLoop.maxLoadPct | quote }} +- name: WORKER_LOOP_MAX_MEMORY_PCT + value: {{ .Values.workerLoop.maxMemoryPct | quote }} +- name: WORKER_LOOP_SLEEP_SECONDS + value: {{ .Values.workerLoop.sleepSeconds | quote }} +{{- end -}} diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index 9b2a98a5..16ffba97 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -16 +16,4 @@ - {{ include "envWorker" . | nindent 2 }} + {{ include "envWorkerLoop" . | nindent 2 }} + - name: WORKER_LOOP_STORAGE_PATHS + value: {{ .Values.assets.storageDirectory | quote }} + # ^ note: the datasets cache is automatically added, so no need to add it here diff --git a/chart/templates/worker/parquet/_container.tpl b/chart/templates/worker/parquet/_container.tpl index 59241834..82b06a2e 100644 --- a/chart/templates/worker/parquet/_container.tpl +++ b/chart/templates/worker/parquet/_container.tpl @@ -15 +15 @@ - {{ include "envWorker" . | nindent 2 }} + {{ include "envWorkerLoop" . | nindent 2 }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 2c25f8c7..b57dccda 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -15 +15 @@ - {{ include "envWorker" . | nindent 2 }} + {{ include "envWorkerLoop" . 
| nindent 2 }} diff --git a/chart/values.yaml b/chart/values.yaml index 52e2f992..44714208 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -61,2 +61,4 @@ queue: -worker: - # Max CPU load (%) - if reached, sleeps until it comes back under the limit +workerLoop: + # maximum disk usage of every storage disk in the list (in percentage) to allow a job to start. Set to 0 to disable the test. + maxDiskUsagePct: 90 + # Max CPU load (%) - if reached, sleeps until it comes back under the limit. Set to 0 to disable the test. @@ -64 +66 @@ worker: - # Max memory (RAM + SWAP) (%) - if reached, sleeps until it comes back under the limit + # Max memory (RAM + SWAP) (%) - if reached, sleeps until it comes back under the limit. Set to 0 to disable the test. diff --git a/jobs/mongodb_migration/README.md b/jobs/mongodb_migration/README.md index c1dcb004..ecac3b78 100644 --- a/jobs/mongodb_migration/README.md +++ b/jobs/mongodb_migration/README.md @@ -7 +7 @@ -The script con be configured using environment variables. They are grouped by scope. +The script can be configured using environment variables. They are grouped by scope. diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index cda294e3..b656e8a7 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -320 +320 @@ name = "libcommon" -version = "0.5.10" +version = "0.6.0" @@ -338 +338 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" @@ -865 +865 @@ python-versions = "3.9.15" -content-hash = "bbaaeaa2093464c1e5204af154045c1b0524614bc886bc543f7ee469572f55b6" +content-hash = "ac7869cb48f3730d996eb5a7aff9ef4ecaa9bd2a66804a9a4301aa8a6b27decf" @@ -1032 +1032 @@ libcommon = [ - {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, + {file = "libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index 3a7a8e90..3779d1f4 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } diff --git a/jobs/mongodb_migration/src/mongodb_migration/config.py b/jobs/mongodb_migration/src/mongodb_migration/config.py index e00b74ee..cbf68dca 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/config.py +++ b/jobs/mongodb_migration/src/mongodb_migration/config.py @@ -3,0 +4,2 @@ +from dataclasses import dataclass, field + @@ -8,0 +11,3 @@ from mongodb_migration.database_migrations import connect_to_database +MONGODB_MIGRATION_MONGO_DATABASE = "datasets_server_maintenance" +MONGO_DATABASE_MONGO_URL = "mongodb://localhost:27017" + @@ -9,0 +15 @@ from mongodb_migration.database_migrations import connect_to_database +@dataclass @@ -11,2 +17,5 @@ class MongodbMigrationConfig: - mongo_database: str - mongo_url: str + mongo_database: str = MONGODB_MIGRATION_MONGO_DATABASE + mongo_url: str = MONGO_DATABASE_MONGO_URL + + def __post_init__(self): + connect_to_database(database=self.mongo_database, host=self.mongo_url) @@ -14 +23,2 @@ class MongodbMigrationConfig: - def __init__(self): + @staticmethod + def 
from_env() -> "MongodbMigrationConfig": @@ -17,6 +27,4 @@ class MongodbMigrationConfig: - self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_maintenance") - self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") - self.setup() - - def setup(self): - connect_to_database(database=self.mongo_database, host=self.mongo_url) + return MongodbMigrationConfig( + mongo_database=env.str(name="MONGO_DATABASE", default=MONGODB_MIGRATION_MONGO_DATABASE), + mongo_url=env.str(name="MONGO_URL", default=MONGO_DATABASE_MONGO_URL), + ) @@ -24,0 +33 @@ class MongodbMigrationConfig: +@dataclass @@ -26,11 +35,13 @@ class JobConfig: - cache: CacheConfig - common: CommonConfig - mongodb_migration: MongodbMigrationConfig - queue: QueueConfig - - def __init__(self): - # First process the common configuration to setup the logging - self.common = CommonConfig() - self.cache = CacheConfig() - self.mongodb_migration = MongodbMigrationConfig() - self.queue = QueueConfig() + cache: CacheConfig = field(default_factory=CacheConfig) + common: CommonConfig = field(default_factory=CommonConfig) + mongodb_migration: MongodbMigrationConfig = field(default_factory=MongodbMigrationConfig) + queue: QueueConfig = field(default_factory=QueueConfig) + + @staticmethod + def from_env() -> "JobConfig": + return JobConfig( + common=CommonConfig.from_env(), + cache=CacheConfig.from_env(), + mongodb_migration=MongodbMigrationConfig.from_env(), + queue=QueueConfig.from_env(), + ) diff --git a/jobs/mongodb_migration/src/mongodb_migration/main.py b/jobs/mongodb_migration/src/mongodb_migration/main.py index 10838526..58be3983 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/main.py +++ b/jobs/mongodb_migration/src/mongodb_migration/main.py @@ -11 +11 @@ if __name__ == "__main__": - job_config = JobConfig() + job_config = JobConfig.from_env() diff --git a/jobs/mongodb_migration/tests/conftest.py b/jobs/mongodb_migration/tests/conftest.py index 14fea96f..6c5cf611 100644 --- a/jobs/mongodb_migration/tests/conftest.py +++ b/jobs/mongodb_migration/tests/conftest.py @@ -22 +22 @@ def app_config(monkeypatch_session: MonkeyPatch) -> JobConfig: - job_config = JobConfig() + job_config = JobConfig.from_env() diff --git a/libs/libcommon/README.md b/libs/libcommon/README.md index c5f13c48..c5996a7d 100644 --- a/libs/libcommon/README.md +++ b/libs/libcommon/README.md @@ -35 +35 @@ Set environment variables to configure the job queues to precompute API response -## Worker configuration +## Worker loop configuration @@ -37 +37 @@ Set environment variables to configure the job queues to precompute API response -Set environment variables to configure the worker that processes the queue. +Set environment variables to configure the worker loop that processes the queue. @@ -39,3 +39,5 @@ Set environment variables to configure the worker that processes the queue. -- `WORKER_MAX_LOAD_PCT`: maximum load of the machine (in percentage: the max between the 1m load and the 5m load divided by the number of CPUs \*100) allowed to start a job. Set to 0 to disable the test. Defaults to 70. -- `WORKER_MAX_MEMORY_PCT`: maximum memory (RAM + SWAP) usage of the machine (in percentage) allowed to start a job. Set to 0 to disable the test. Defaults to 80. -- `WORKER_SLEEP_SECONDS`: duration in seconds that a worker waits at each loop iteration before checking if resources are available and processing a job if any is available. Note that the worker does not sleep on the first loop after finishing a job. Defaults to `15`. 
+- `WORKER_LOOP_MAX_DISK_USAGE_PCT`: maximum disk usage of every storage disk in the list (in percentage) to allow a job to start. Set to 0 to disable the test. Defaults to 90. +- `WORKER_LOOP_MAX_LOAD_PCT`: maximum load of the machine (in percentage: the max between the 1m load and the 5m load divided by the number of CPUs \*100) allowed to start a job. Set to 0 to disable the test. Defaults to 70. +- `WORKER_LOOP_MAX_MEMORY_PCT`: maximum memory (RAM + SWAP) usage of the machine (in percentage) allowed to start a job. Set to 0 to disable the test. Defaults to 80. +- `WORKER_LOOP_SLEEP_SECONDS`: wait duration in seconds at each loop iteration before checking if resources are available and processing a job if any is available. Note that the loop doesn't wait just after finishing a job: the next job is immediately processed. Defaults to `15`. +- `WORKER_LOOP_STORAGE_PATHS`: comma-separated list of paths to check for disk usage. Defaults to empty. diff --git a/libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl new file mode 100644 index 00000000..f4edf7ee Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.6.0.tar.gz b/libs/libcommon/dist/libcommon-0.6.0.tar.gz new file mode 100644 index 00000000..5ce5b739 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.6.0.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 47f4c352..cb2228fa 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.5.11" +version = "0.6.0" diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py index b5495b17..e6ab02f1 100644 --- a/libs/libcommon/src/libcommon/config.py +++ b/libs/libcommon/src/libcommon/config.py @@ -4 +4,4 @@ -from typing import Optional + +import logging +from dataclasses import dataclass, field +from typing import List, Optional @@ -13,0 +17,3 @@ from libcommon.storage import init_dir +ASSETS_BASE_URL = "assets" +ASSETS_STORE_DIRECTORY = None + @@ -14,0 +21 @@ from libcommon.storage import init_dir +@dataclass @@ -16,2 +23,5 @@ class AssetsConfig: - base_url: str - storage_directory: str + base_url: str = ASSETS_BASE_URL + _storage_directory: Optional[str] = ASSETS_STORE_DIRECTORY + + def __post_init__(self): + self.storage_directory = init_dir(directory=self._storage_directory, appname="datasets_server_assets") @@ -19 +29,2 @@ class AssetsConfig: - def __init__(self): + @staticmethod + def from_env() -> "AssetsConfig": @@ -22,3 +33,4 @@ class AssetsConfig: - self.base_url = env.str(name="BASE_URL", default="assets") - self._storage_directory = env.str(name="STORAGE_DIRECTORY", default=None) - self.setup() + return AssetsConfig( + base_url=env.str(name="BASE_URL", default=ASSETS_BASE_URL), + _storage_directory=env.str(name="STORAGE_DIRECTORY", default=ASSETS_STORE_DIRECTORY), + ) @@ -26,2 +38,4 @@ class AssetsConfig: - def setup(self): - self.storage_directory = init_dir(directory=self._storage_directory, appname="datasets_server_assets") + +COMMON_HF_ENDPOINT = "https://huggingface.co" +COMMON_HF_TOKEN = None +COMMON_LOG_LEVEL = logging.INFO @@ -29,0 +44 @@ class AssetsConfig: +@dataclass @@ -31,3 +46,3 @@ class CommonConfig: - hf_endpoint: str - hf_token: Optional[str] - log_level: int + hf_endpoint: str = COMMON_HF_ENDPOINT + hf_token: Optional[str] = COMMON_HF_TOKEN + log_level: int = COMMON_LOG_LEVEL @@ 
-35 +50,5 @@ class CommonConfig: - def __init__(self): + def __post_init__(self): + init_logging(self.log_level) + + @staticmethod + def from_env() -> "CommonConfig": @@ -38,5 +57,5 @@ class CommonConfig: - self.hf_endpoint = env.str(name="HF_ENDPOINT", default="https://huggingface.co") - self.log_level = env.log_level(name="LOG_LEVEL", default="INFO") - hf_token = env.str(name="HF_TOKEN", default="") - self.hf_token = None if hf_token == "" else hf_token # nosec - self.setup() + return CommonConfig( + hf_endpoint=env.str(name="HF_ENDPOINT", default=COMMON_HF_ENDPOINT), + hf_token=env.str(name="HF_TOKEN", default=COMMON_HF_TOKEN), # nosec + log_level=env.log_level(name="LOG_LEVEL", default=COMMON_LOG_LEVEL), + ) @@ -44,2 +63,3 @@ class CommonConfig: - def setup(self): - init_logging(self.log_level) + +CACHE_MONGO_DATABASE = "datasets_server_cache" +CACHE_MONGO_URL = "mongodb://localhost:27017" @@ -47,0 +68 @@ class CommonConfig: +@dataclass @@ -49,2 +70,2 @@ class CacheConfig: - mongo_database: str - mongo_url: str + mongo_database: str = CACHE_MONGO_DATABASE + mongo_url: str = CACHE_MONGO_URL @@ -52 +73,5 @@ class CacheConfig: - def __init__(self): + def __post_init__(self): + connect_to_cache_database(database=self.mongo_database, host=self.mongo_url) + + @staticmethod + def from_env() -> "CacheConfig": @@ -55,3 +80,4 @@ class CacheConfig: - self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_cache") - self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") - self.setup() + return CacheConfig( + mongo_database=env.str(name="MONGO_DATABASE", default=CACHE_MONGO_DATABASE), + mongo_url=env.str(name="MONGO_URL", default=CACHE_MONGO_URL), + ) @@ -59,2 +84,0 @@ class CacheConfig: - def setup(self): - connect_to_cache_database(database=self.mongo_database, host=self.mongo_url) @@ -61,0 +86,3 @@ class CacheConfig: +QUEUE_MAX_JOBS_PER_NAMESPACE = 1 +QUEUE_MONGO_DATABASE = "datasets_server_queue" +QUEUE_MONGO_URL = "mongodb://localhost:27017" @@ -62,0 +90,2 @@ class CacheConfig: + +@dataclass @@ -64,3 +93,6 @@ class QueueConfig: - max_jobs_per_namespace: int - mongo_database: str - mongo_url: str + max_jobs_per_namespace: int = QUEUE_MAX_JOBS_PER_NAMESPACE + mongo_database: str = QUEUE_MONGO_DATABASE + mongo_url: str = QUEUE_MONGO_URL + + def __post_init__(self): + connect_to_queue_database(database=self.mongo_database, host=self.mongo_url) @@ -68 +100,2 @@ class QueueConfig: - def __init__(self): + @staticmethod + def from_env() -> "QueueConfig": @@ -71,4 +104,11 @@ class QueueConfig: - self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_queue") - self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") - self.max_jobs_per_namespace = env.int(name="MAX_JOBS_PER_NAMESPACE", default=1) - self.setup() + return QueueConfig( + max_jobs_per_namespace=env.int(name="MAX_JOBS_PER_NAMESPACE", default=QUEUE_MAX_JOBS_PER_NAMESPACE), + mongo_database=env.str(name="MONGO_DATABASE", default=QUEUE_MONGO_DATABASE), + mongo_url=env.str(name="MONGO_URL", default=QUEUE_MONGO_URL), + ) + + +WORKER_LOOP_MAX_DISK_USAGE_PCT = 90 +WORKER_LOOP_MAX_LOAD_PCT = 70 +WORKER_LOOP_MAX_MEMORY_PCT = 80 +WORKER_LOOP_SLEEP_SECONDS = 15 @@ -76,2 +115,0 @@ class QueueConfig: - def setup(self): - connect_to_queue_database(database=self.mongo_database, host=self.mongo_url) @@ -78,0 +117,2 @@ class QueueConfig: +def get_empty_str_list() -> List[str]: + return [] @@ -80,4 +119,0 @@ class QueueConfig: -class WorkerConfig: - max_load_pct: int 
- max_memory_pct: int - sleep_seconds: int @@ -85 +121,10 @@ class WorkerConfig: - def __init__(self): +@dataclass +class WorkerLoopConfig: + max_disk_usage_pct: int = WORKER_LOOP_MAX_DISK_USAGE_PCT + max_load_pct: int = WORKER_LOOP_MAX_LOAD_PCT + max_memory_pct: int = WORKER_LOOP_MAX_MEMORY_PCT + sleep_seconds: int = WORKER_LOOP_SLEEP_SECONDS + storage_paths: List[str] = field(default_factory=get_empty_str_list) + + @staticmethod + def from_env() -> "WorkerLoopConfig": @@ -87,4 +132,8 @@ class WorkerConfig: - with env.prefixed("WORKER_"): - self.max_load_pct = env.int(name="MAX_LOAD_PCT", default=70) - self.max_memory_pct = env.int(name="MAX_MEMORY_PCT", default=80) - self.sleep_seconds = env.int(name="SLEEP_SECONDS", default=15) + with env.prefixed("WORKER_LOOP_"): + return WorkerLoopConfig( + max_disk_usage_pct=env.int(name="MAX_DISK_USAGE_PCT", default=WORKER_LOOP_MAX_DISK_USAGE_PCT), + max_load_pct=env.int(name="MAX_LOAD_PCT", default=WORKER_LOOP_MAX_LOAD_PCT), + max_memory_pct=env.int(name="MAX_MEMORY_PCT", default=WORKER_LOOP_MAX_MEMORY_PCT), + sleep_seconds=env.int(name="SLEEP_SECONDS", default=WORKER_LOOP_SLEEP_SECONDS), + storage_paths=env.list(name="STORAGE_PATHS", default=get_empty_str_list()), + ) @@ -92,0 +142 @@ class WorkerConfig: +@dataclass @@ -94,6 +144,2 @@ class ProcessingGraphConfig: - specification: ProcessingGraphSpecification - graph: ProcessingGraph - - def __init__(self): - # TODO: allow passing the graph via env vars - self.specification = { + specification: ProcessingGraphSpecification = field( + default_factory=lambda: { @@ -103,0 +150 @@ class ProcessingGraphConfig: + ) @@ -105,3 +152 @@ class ProcessingGraphConfig: - self.setup() - - def setup(self): + def __post_init__(self): @@ -108,0 +154,5 @@ class ProcessingGraphConfig: + + @staticmethod + def from_env() -> "ProcessingGraphConfig": + # TODO: allow passing the graph via env vars + return ProcessingGraphConfig() diff --git a/libs/libcommon/src/libcommon/processing_graph.py b/libs/libcommon/src/libcommon/processing_graph.py index 2e09d7bf..09090184 100644 --- a/libs/libcommon/src/libcommon/processing_graph.py +++ b/libs/libcommon/src/libcommon/processing_graph.py @@ -120,0 +121,4 @@ class ProcessingGraph: + def get_step_by_job_type(self, job_type: str) -> ProcessingStep: + # for now: the job_type is just an alias for the endpoint + return self.get_step(job_type) + diff --git a/libs/libcommon/src/libcommon/queue.py b/libs/libcommon/src/libcommon/queue.py index 6173edee..39bbbf73 100644 --- a/libs/libcommon/src/libcommon/queue.py +++ b/libs/libcommon/src/libcommon/queue.py @@ -60 +60 @@ class JobDict(TypedDict): -class StartedJobInfo(TypedDict): +class JobInfo(TypedDict): @@ -61,0 +62 @@ class StartedJobInfo(TypedDict): + type: str @@ -283 +284 @@ class Queue: - def start_job(self) -> StartedJobInfo: + def start_job(self) -> JobInfo: @@ -292 +293 @@ class Queue: - Returns: the job id and the input arguments: dataset, config and split + Returns: the job id, the type (endpoint), the input arguments: dataset, config and split and the force flag @@ -298,0 +300 @@ class Queue: + "type": self.type, diff --git a/libs/libcommon/src/libcommon/worker.py b/libs/libcommon/src/libcommon/worker.py index 052b2a00..595532dd 100644 --- a/libs/libcommon/src/libcommon/worker.py +++ b/libs/libcommon/src/libcommon/worker.py @@ -5,2 +4,0 @@ import logging -import random -import time @@ -12 +9,0 @@ from packaging import version -from psutil import cpu_count, getloadavg, swap_memory, virtual_memory @@ -14 +11 @@ from psutil import 
cpu_count, getloadavg, swap_memory, virtual_memory -from libcommon.config import CommonConfig, QueueConfig, WorkerConfig +from libcommon.config import CommonConfig @@ -18 +15 @@ from libcommon.processing_graph import ProcessingStep -from libcommon.queue import EmptyQueueError, Queue, Status +from libcommon.queue import JobInfo, Status @@ -106,2 +103,20 @@ class Worker(ABC): - processing_step: ProcessingStep - queue: Queue + """ + Base class for workers. A worker is a class that processes a job, for a specific processing step. + + It cannot be instantiated directly, but must be subclassed. + + Args: + job_info (:obj:`JobInfo`): + The job to process. It contains the job_id, the job type, the dataset, the config, the split + and the force flag. + common_config (:obj:`CommonConfig`): + The common config. + processing_step (:obj:`ProcessingStep`): + The processing step to process. + """ + + job_id: str + dataset: str + config: Optional[str] = None + split: Optional[str] = None + force: bool @@ -109,3 +124,11 @@ class Worker(ABC): - queue_config: QueueConfig - worker_config: WorkerConfig - version: str + processing_step: ProcessingStep + + @staticmethod + @abstractmethod + def get_job_type() -> str: + pass + + @staticmethod + @abstractmethod + def get_version() -> str: + pass @@ -115 +138 @@ class Worker(ABC): - processing_step: ProcessingStep, + job_info: JobInfo, @@ -117,3 +140 @@ class Worker(ABC): - queue_config: QueueConfig, - worker_config: WorkerConfig, - version: str, + processing_step: ProcessingStep, @@ -121 +142,6 @@ class Worker(ABC): - self.processing_step = processing_step + self.job_type = job_info["type"] + self.job_id = job_info["job_id"] + self.dataset = job_info["dataset"] + self.config = job_info["config"] + self.split = job_info["split"] + self.force = job_info["force"] @@ -123,3 +149 @@ class Worker(ABC): - self.queue_config = queue_config - self.worker_config = worker_config - self.version = version + self.processing_step = processing_step @@ -129,2 +153,15 @@ class Worker(ABC): - self.queue = Queue( - type=self.processing_step.job_type, max_jobs_per_namespace=self.queue_config.max_jobs_per_namespace + worker_job_type = self.get_job_type() + if self.processing_step.job_type != worker_job_type: + raise ValueError( + f"The processing step's job type is {self.processing_step.job_type}, but the worker only processes" + f" {worker_job_type}" + ) + if self.job_type != worker_job_type: + raise ValueError( + f"The submitted job type is {self.job_type}, but the worker only processes {worker_job_type}" + ) + + def __str__(self): + return ( + f"JobRunner(job_id={self.job_id} dataset={self.dataset} config={self.config}" + + f" split={self.split} force={self.force})" @@ -148,52 +185 @@ class Worker(ABC): - def has_memory(self) -> bool: - if self.worker_config.max_memory_pct <= 0: - return True - virtual_memory_used: int = virtual_memory().used # type: ignore - virtual_memory_total: int = virtual_memory().total # type: ignore - percent = (swap_memory().used + virtual_memory_used) / (swap_memory().total + virtual_memory_total) - ok = percent < self.worker_config.max_memory_pct - if not ok: - self.info( - f"memory usage (RAM + SWAP) is too high: {percent:.0f}% - max is {self.worker_config.max_memory_pct}%" - ) - return ok - - def has_cpu(self) -> bool: - if self.worker_config.max_load_pct <= 0: - return True - load_pct = max(getloadavg()[:2]) / cpu_count() * 100 - # ^ only current load and 5m load. 
15m load is not relevant to decide to launch a new job - ok = load_pct < self.worker_config.max_load_pct - if not ok: - self.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.worker_config.max_load_pct}%") - return ok - - def has_storage(self) -> bool: - # placeholder, to be overridden by workers if needed - return True - - def has_resources(self) -> bool: - return self.has_memory() and self.has_cpu() and self.has_storage() - - def sleep(self) -> None: - jitter = 0.75 + random.random() / 2 # nosec - # ^ between 0.75 and 1.25 - duration = self.worker_config.sleep_seconds * jitter - self.debug(f"sleep during {duration:.2f} seconds") - time.sleep(duration) - - def loop(self) -> None: - try: - while True: - if self.has_resources() and self.process_next_job(): - # loop immediately to try another job - # see https://github.com/huggingface/datasets-server/issues/265 - continue - self.sleep() - except BaseException as e: - self.critical(f"quit due to an uncaught error while processing the job: {e}") - raise - - def process_next_job(self) -> bool: - self.debug("try to process a job") - + def run(self) -> Literal[Status.SUCCESS, Status.ERROR, Status.SKIPPED]: @@ -201,17 +187,5 @@ class Worker(ABC): - started_job_info = self.queue.start_job() - job_id = started_job_info["job_id"] - dataset = started_job_info["dataset"] - config = started_job_info["config"] - split = started_job_info["split"] - force = started_job_info["force"] - parameters_for_log = f"dataset={dataset}" + ("" if split is None else f"config={config} split={split}") - self.debug(f"job assigned: {job_id} for {parameters_for_log}") - except EmptyQueueError: - self.debug("no job in the queue") - return False - - finished_status: Literal[Status.SUCCESS, Status.ERROR, Status.SKIPPED] - try: - self.info(f"compute {parameters_for_log}") - if self.should_skip_job(dataset=dataset, config=config, split=split, force=force): - finished_status = Status.SKIPPED + self.info(f"compute {self}") + if self.should_skip_job(): + return Status.SKIPPED + elif self.process(): + return Status.SUCCESS @@ -219,5 +193 @@ class Worker(ABC): - finished_status = ( - Status.SUCCESS - if self.process(dataset=dataset, config=config, split=split, force=force) - else Status.ERROR - ) + return Status.ERROR @@ -225,6 +195,2 @@ class Worker(ABC): - self.exception(f"error while computing {parameters_for_log}") - finished_status = Status.ERROR - finally: - self.queue.finish_job(job_id=job_id, finished_status=finished_status) - self.debug(f"job finished with {finished_status.value}: {job_id} for {parameters_for_log}") - return True + self.exception(f"error while computing {self}") + return Status.ERROR @@ -246 +212 @@ class Worker(ABC): - return parse_version(self.version).major - parse_version(other_version).major + return parse_version(self.get_version()).major - parse_version(other_version).major @@ -250,6 +216 @@ class Worker(ABC): - def get_dataset_git_revision( - self, - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, - ) -> Optional[str]: + def get_dataset_git_revision(self) -> Optional[str]: @@ -257 +218,3 @@ class Worker(ABC): - return get_dataset_git_revision(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) + return get_dataset_git_revision( + dataset=self.dataset, hf_endpoint=self.common_config.hf_endpoint, hf_token=self.common_config.hf_token + ) @@ -259,3 +222 @@ class Worker(ABC): - def should_skip_job( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> bool: + def 
should_skip_job(self) -> bool: @@ -271,6 +231,0 @@ class Worker(ABC): - Args: - dataset (:obj:`str`): The name of the dataset. - config (:obj:`str`, `optional`): The name of the configuration. - split (:obj:`str`, `optional`): The name of the split. - force (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to force the job to be run. - @@ -280 +235 @@ class Worker(ABC): - if force: + if self.force: @@ -284,4 +239 @@ class Worker(ABC): - kind=self.processing_step.cache_kind, dataset=dataset, config=config, split=split - ) - dataset_git_revision = self.get_dataset_git_revision( - dataset=dataset, hf_endpoint=self.common_config.hf_endpoint, hf_token=self.common_config.hf_token + kind=self.processing_step.cache_kind, dataset=self.dataset, config=self.config, split=self.split @@ -288,0 +241 @@ class Worker(ABC): + dataset_git_revision = self.get_dataset_git_revision() @@ -302,4 +254,0 @@ class Worker(ABC): - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, @@ -309,3 +258 @@ class Worker(ABC): - dataset_git_revision = self.get_dataset_git_revision( - dataset=dataset, hf_endpoint=self.common_config.hf_endpoint, hf_token=self.common_config.hf_token - ) + dataset_git_revision = self.get_dataset_git_revision() @@ -313,2 +260,2 @@ class Worker(ABC): - self.debug(f"the dataset={dataset} has no git revision, don't update the cache") - raise NoGitRevisionError(f"Could not get git revision for dataset {dataset}") + self.debug(f"the dataset={self.dataset} has no git revision, don't update the cache") + raise NoGitRevisionError(f"Could not get git revision for dataset {self.dataset}") @@ -316,2 +263,2 @@ class Worker(ABC): - self.pre_compute(dataset=dataset, config=config, split=split, force=force) - content = self.compute(dataset=dataset, config=config, split=split, force=force) + self.pre_compute() + content = self.compute() @@ -320 +267 @@ class Worker(ABC): - self.post_compute(dataset=dataset, config=config, split=split, force=force) + self.post_compute() @@ -323,3 +270,3 @@ class Worker(ABC): - dataset=dataset, - config=config, - split=split, + dataset=self.dataset, + config=self.config, + split=self.split, @@ -328 +275 @@ class Worker(ABC): - worker_version=self.version, + worker_version=self.get_version(), @@ -331 +278 @@ class Worker(ABC): - self.debug(f"dataset={dataset} config={config} split={split} is valid, cache updated") + self.debug(f"dataset={self.dataset} config={self.config} split={self.split} is valid, cache updated") @@ -340 +287,2 @@ class Worker(ABC): - f"the dataset={dataset}, config {config} or split {split} could not be found, don't update the cache" + f"the dataset={self.dataset}, config {self.config} or split {self.split} could not be found, don't" + " update the cache" @@ -347,3 +295,3 @@ class Worker(ABC): - dataset=dataset, - config=config, - split=split, + dataset=self.dataset, + config=self.config, + split=self.split, @@ -354 +302 @@ class Worker(ABC): - worker_version=self.version, + worker_version=self.get_version(), @@ -357 +305,4 @@ class Worker(ABC): - self.debug(f"response for dataset={dataset} config={config} split={split} had an error, cache updated") + self.debug( + f"response for dataset={self.dataset} config={self.config} split={self.split} had an error, cache" + " updated" + ) @@ -360,7 +311 @@ class Worker(ABC): - def pre_compute( - self, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> None: + def pre_compute(self) -> None: @@ -371,7 +316 @@ 
class Worker(ABC): - def compute( - self, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> Mapping[str, Any]: + def compute(self) -> Mapping[str, Any]: @@ -380,7 +319 @@ class Worker(ABC): - def post_compute( - self, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> None: + def post_compute(self) -> None: @@ -388,0 +322,15 @@ class Worker(ABC): + + +class WorkerFactory(ABC): + """ + Base class for worker factories. A worker factory is a class that creates a worker. + + It cannot be instantiated directly, but must be subclassed. + """ + + def create_worker(self, job_info: JobInfo) -> Worker: + return self._create_worker(job_info=job_info) + + @abstractmethod + def _create_worker(self, job_info: JobInfo) -> Worker: + pass diff --git a/libs/libcommon/src/libcommon/worker_loop.py b/libs/libcommon/src/libcommon/worker_loop.py new file mode 100644 index 00000000..d5e1e445 --- /dev/null +++ b/libs/libcommon/src/libcommon/worker_loop.py @@ -0,0 +1,124 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +import random +import time +from dataclasses import dataclass + +from psutil import cpu_count, disk_usage, getloadavg, swap_memory, virtual_memory + +from libcommon.config import WorkerLoopConfig +from libcommon.queue import EmptyQueueError, Queue +from libcommon.worker import WorkerFactory + + +@dataclass +class WorkerLoop: + """ + A worker loop gets jobs from a queue and processes them. + + Once initialized, the worker loop can be started with the `loop` method and will run until an uncaught exception + is raised. + + Args: + queue (`Queue`): + The job queue. + worker_factory (`WorkerFactory`): + The worker factory that will create a worker for each job. Must be able to process the jobs of the queue. + worker_loop_config (`WorkerLoopConfig`): + Worker loop configuration. + """ + + queue: Queue + worker_factory: WorkerFactory + worker_loop_config: WorkerLoopConfig + + def log(self, level: int, msg: str) -> None: + logging.log(level=level, msg=f"[{self.queue.type}] {msg}") + + def debug(self, msg: str) -> None: + self.log(level=logging.DEBUG, msg=msg) + + def info(self, msg: str) -> None: + self.log(level=logging.INFO, msg=msg) + + def critical(self, msg: str) -> None: + self.log(level=logging.CRITICAL, msg=msg) + + def exception(self, msg: str) -> None: + self.log(level=logging.ERROR, msg=msg) + + def has_memory(self) -> bool: + if self.worker_loop_config.max_memory_pct <= 0: + return True + virtual_memory_used: int = virtual_memory().used # type: ignore + virtual_memory_total: int = virtual_memory().total # type: ignore + percent = (swap_memory().used + virtual_memory_used) / (swap_memory().total + virtual_memory_total) + ok = percent < self.worker_loop_config.max_memory_pct + if not ok: + self.info( + f"memory usage (RAM + SWAP) is too high: {percent:.0f}% - max is" + f" {self.worker_loop_config.max_memory_pct}%" + ) + return ok + + def has_cpu(self) -> bool: + if self.worker_loop_config.max_load_pct <= 0: + return True + load_pct = max(getloadavg()[:2]) / cpu_count() * 100 + # ^ only current load and 5m load. 
15m load is not relevant to decide to launch a new job + ok = load_pct < self.worker_loop_config.max_load_pct + if not ok: + self.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.worker_loop_config.max_load_pct}%") + return ok + + def has_storage(self) -> bool: + if self.worker_loop_config.max_disk_usage_pct <= 0: + return True + for path in self.worker_loop_config.storage_paths: + try: + usage = disk_usage(path) + if usage.percent >= self.worker_loop_config.max_disk_usage_pct: + return False + except Exception: + # if we can't get the disk usage, we let the process continue + return True + return True + + def has_resources(self) -> bool: + return self.has_memory() and self.has_cpu() and self.has_storage() + + def sleep(self) -> None: + jitter = 0.75 + random.random() / 2 # nosec + # ^ between 0.75 and 1.25 + duration = self.worker_loop_config.sleep_seconds * jitter + self.debug(f"sleep during {duration:.2f} seconds") + time.sleep(duration) + + def loop(self) -> None: + try: + while True: + if self.has_resources() and self.process_next_job(): + # loop immediately to try another job + # see https://github.com/huggingface/datasets-server/issues/265 + continue + self.sleep() + except BaseException as e: + self.critical(f"quit due to an uncaught error while processing the job: {e}") + raise + + def process_next_job(self) -> bool: + self.debug("try to process a job") + + try: + worker = self.worker_factory.create_worker(self.queue.start_job()) + self.debug(f"job assigned: {worker}") + except EmptyQueueError: + self.debug("no job in the queue") + return False + + finished_status = worker.run() + self.queue.finish_job(job_id=worker.job_id, finished_status=finished_status) + self.debug(f"job finished with {finished_status.value}: {worker}") + return True diff --git a/libs/libcommon/tests/conftest.py b/libs/libcommon/tests/conftest.py index 204ccd2e..d1d4199b 100644 --- a/libs/libcommon/tests/conftest.py +++ b/libs/libcommon/tests/conftest.py @@ -4 +4,2 @@ -from pytest import MonkeyPatch, fixture +from environs import Env +from pytest import fixture @@ -6 +7 @@ from pytest import MonkeyPatch, fixture -from libcommon.config import CacheConfig, CommonConfig, QueueConfig, WorkerConfig +from libcommon.config import CacheConfig, QueueConfig @@ -9,14 +10,2 @@ from libcommon.processing_graph import ProcessingStep - -@fixture(scope="session") -def common_config(): - return CommonConfig() - - -# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 -@fixture(scope="session") -def monkeypatch_session(): - monkeypatch_session = MonkeyPatch() - monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") - monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") - yield monkeypatch_session - monkeypatch_session.undo() +# CACHE_MONGO_URL and QUEUE_MONGO_URL must be set in the environment, and correspond to a running mongo database +env = Env(expand_vars=True) @@ -26,2 +15,2 @@ def monkeypatch_session(): -def cache_config(monkeypatch_session: MonkeyPatch) -> CacheConfig: - cache_config = CacheConfig() +def cache_config() -> CacheConfig: + cache_config = CacheConfig(mongo_database="datasets_server_cache_test", mongo_url=env.str("CACHE_MONGO_URL")) @@ -34,2 +23,2 @@ def cache_config(monkeypatch_session: MonkeyPatch) -> CacheConfig: -def queue_config(monkeypatch_session: MonkeyPatch) -> QueueConfig: - queue_config = QueueConfig() +def queue_config() -> QueueConfig: + queue_config = QueueConfig(mongo_database="datasets_server_queue_test", 
mongo_url=env.str("QUEUE_MONGO_URL")) @@ -41,5 +29,0 @@ def queue_config(monkeypatch_session: MonkeyPatch) -> QueueConfig: -@fixture(scope="session", autouse=True) -def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: - return WorkerConfig() - - @@ -47 +31 @@ def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: -def test_processing_step(monkeypatch_session: MonkeyPatch) -> ProcessingStep: +def test_processing_step() -> ProcessingStep: @@ -49 +33 @@ def test_processing_step(monkeypatch_session: MonkeyPatch) -> ProcessingStep: - endpoint="/test", + endpoint="/dummy", diff --git a/libs/libcommon/tests/test_config.py b/libs/libcommon/tests/test_config.py index 721ace56..8dab322e 100644 --- a/libs/libcommon/tests/test_config.py +++ b/libs/libcommon/tests/test_config.py @@ -7 +7,2 @@ from libcommon.config import CommonConfig -def test_common_config(common_config: CommonConfig): +def test_common_config(): + common_config = CommonConfig() diff --git a/libs/libcommon/tests/test_queue.py b/libs/libcommon/tests/test_queue.py index 130f9889..90280c70 100644 --- a/libs/libcommon/tests/test_queue.py +++ b/libs/libcommon/tests/test_queue.py @@ -8,0 +9 @@ import pytest +from libcommon.config import QueueConfig @@ -13 +14 @@ from libcommon.queue import EmptyQueueError, Queue, Status, _clean_queue_databas -def clean_mongo_database() -> None: +def clean_mongo_database(queue_config: QueueConfig) -> None: @@ -28,5 +29,6 @@ def test_add_job() -> None: - started_job_info = queue.start_job() - assert started_job_info["dataset"] == test_dataset - assert started_job_info["config"] is None - assert started_job_info["split"] is None - assert started_job_info["force"] is True + job_info = queue.start_job() + assert job_info["type"] == test_type + assert job_info["dataset"] == test_dataset + assert job_info["config"] is None + assert job_info["split"] is None + assert job_info["force"] is True @@ -41 +43 @@ def test_add_job() -> None: - queue.finish_job(job_id=started_job_info["job_id"], finished_status=Status.SUCCESS) + queue.finish_job(job_id=job_info["job_id"], finished_status=Status.SUCCESS) @@ -45,3 +47,3 @@ def test_add_job() -> None: - started_job_info = queue.start_job() - assert started_job_info["force"] is False - queue.finish_job(job_id=started_job_info["job_id"], finished_status=Status.SUCCESS) + job_info = queue.start_job() + assert job_info["force"] is False + queue.finish_job(job_id=job_info["job_id"], finished_status=Status.SUCCESS) @@ -49,3 +51,3 @@ def test_add_job() -> None: - started_job_info = queue.start_job() - assert started_job_info["force"] is True - other_job_id = ("1" if started_job_info["job_id"][0] == "0" else "0") + started_job_info["job_id"][1:] + job_info = queue.start_job() + assert job_info["force"] is True + other_job_id = ("1" if job_info["job_id"][0] == "0" else "0") + job_info["job_id"][1:] @@ -55 +57 @@ def test_add_job() -> None: - queue.finish_job(job_id=started_job_info["job_id"], finished_status=Status.SUCCESS) + queue.finish_job(job_id=job_info["job_id"], finished_status=Status.SUCCESS) @@ -62,0 +65,6 @@ def test_add_job() -> None: +def check_job(queue: Queue, expected_dataset: str, expected_split: str) -> None: + job_info = queue.start_job() + assert job_info["dataset"] == expected_dataset + assert job_info["split"] == expected_split + + @@ -73,18 +81,6 @@ def test_priority_to_non_started_datasets() -> None: - started_job_info = queue.start_job() - assert started_job_info["dataset"] == "dataset1" - assert started_job_info["split"] == "split1" 
- started_job_info = queue.start_job() - assert started_job_info["dataset"] == "dataset2" - assert started_job_info["split"] == "split1" - started_job_info = queue.start_job() - assert started_job_info["dataset"] == "dataset3" - assert started_job_info["split"] == "split1" - started_job_info = queue.start_job() - assert started_job_info["dataset"] == "dataset1/dataset" - assert started_job_info["split"] == "split1" - started_job_info = queue.start_job() - assert started_job_info["dataset"] == "dataset2" - assert started_job_info["split"] == "split2" - started_job_info = queue.start_job() - assert started_job_info["dataset"] == "dataset1" - assert started_job_info["split"] == "split2" + check_job(queue=queue, expected_dataset="dataset1", expected_split="split1") + check_job(queue=queue, expected_dataset="dataset2", expected_split="split1") + check_job(queue=queue, expected_dataset="dataset3", expected_split="split1") + check_job(queue=queue, expected_dataset="dataset1/dataset", expected_split="split1") + check_job(queue=queue, expected_dataset="dataset2", expected_split="split2") + check_job(queue=queue, expected_dataset="dataset1", expected_split="split2") @@ -108,4 +104,4 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - started_job_info = queue.start_job() - assert started_job_info["dataset"] == test_dataset - assert started_job_info["config"] == test_config - assert started_job_info["split"] == "split1" + job_info = queue.start_job() + assert job_info["dataset"] == test_dataset + assert job_info["config"] == test_config + assert job_info["split"] == "split1" @@ -117,2 +113,2 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - started_job_info_2 = queue.start_job() - assert started_job_info_2["split"] == "split2" + job_info_2 = queue.start_job() + assert job_info_2["split"] == "split2" @@ -125 +121 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - queue.finish_job(started_job_info["job_id"], finished_status=Status.SUCCESS) + queue.finish_job(job_info["job_id"], finished_status=Status.SUCCESS) @@ -163,3 +159,3 @@ def test_get_total_duration_per_dataset() -> None: - started_job_info = queue.start_job() - started_job_info_2 = queue.start_job() - started_job_info_3 = queue.start_job() + job_info = queue.start_job() + job_info_2 = queue.start_job() + job_info_3 = queue.start_job() @@ -170,3 +166,3 @@ def test_get_total_duration_per_dataset() -> None: - queue.finish_job(started_job_info["job_id"], finished_status=Status.SUCCESS) - queue.finish_job(started_job_info_2["job_id"], finished_status=Status.ERROR) - queue.finish_job(started_job_info_3["job_id"], finished_status=Status.SUCCESS) + queue.finish_job(job_info["job_id"], finished_status=Status.SUCCESS) + queue.finish_job(job_info_2["job_id"], finished_status=Status.ERROR) + queue.finish_job(job_info_3["job_id"], finished_status=Status.SUCCESS) diff --git a/libs/libcommon/tests/test_simple_cache.py b/libs/libcommon/tests/test_simple_cache.py index e11459e3..afa12c23 100644 --- a/libs/libcommon/tests/test_simple_cache.py +++ b/libs/libcommon/tests/test_simple_cache.py @@ -10,0 +11 @@ from pymongo.errors import DocumentTooLarge +from libcommon.config import CacheConfig @@ -31 +32 @@ from libcommon.simple_cache import ( -def clean_mongo_database() -> None: +def clean_mongo_database(cache_config: CacheConfig) -> None: diff --git a/libs/libcommon/tests/test_worker.py b/libs/libcommon/tests/test_worker.py index 33c8d953..f5111f33 100644 --- 
a/libs/libcommon/tests/test_worker.py +++ b/libs/libcommon/tests/test_worker.py @@ -5 +5 @@ import pytest -from libcommon.config import CommonConfig, QueueConfig, WorkerConfig +from libcommon.config import CommonConfig @@ -20,6 +20 @@ class DummyWorker(Worker): - def get_dataset_git_revision( - self, - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, - ) -> Optional[str]: + def get_dataset_git_revision(self) -> Optional[str]: @@ -28,4 +23,3 @@ class DummyWorker(Worker): - def compute( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> Mapping[str, Any]: - return {"key": "value"} + @staticmethod + def get_job_type() -> str: + return "/dummy" @@ -32,0 +27,3 @@ class DummyWorker(Worker): + @staticmethod + def get_version() -> str: + return "1.0.1" @@ -34,3 +31,2 @@ class DummyWorker(Worker): -class NoStorageWorker(DummyWorker): - def has_storage(self) -> bool: - return False + def compute(self) -> Mapping[str, Any]: + return {"key": "value"} @@ -56,24 +51,0 @@ def test_parse_version(string_version: str, expected_major_version: int, should_ -def test_has_storage( - test_processing_step: ProcessingStep, - common_config: CommonConfig, - queue_config: QueueConfig, - worker_config: WorkerConfig, -) -> None: - worker = DummyWorker( - processing_step=test_processing_step, - common_config=common_config, - queue_config=queue_config, - worker_config=worker_config, - version="1.0.0", - ) - assert worker.has_storage() is True - worker = NoStorageWorker( - processing_step=test_processing_step, - common_config=common_config, - queue_config=queue_config, - worker_config=worker_config, - version="1.0.0", - ) - assert worker.has_storage() is False - - @@ -81 +53 @@ def test_has_storage( - "worker_version, other_version, expected, should_raise", + "other_version, expected, should_raise", @@ -83,4 +55,4 @@ def test_has_storage( - ("1.0.0", "1.0.1", 0, False), - ("1.0.0", "2.0.1", -1, False), - ("2.0.0", "1.0.1", 1, False), - ("not a version", "1.0.1", None, True), + ("1.0.0", 0, False), + ("0.1.0", 1, False), + ("2.0.0", -1, False), + ("not a version", None, True), @@ -91,4 +62,0 @@ def test_compare_major_version( - common_config: CommonConfig, - queue_config: QueueConfig, - worker_config: WorkerConfig, - worker_version: str, @@ -98,0 +67,5 @@ def test_compare_major_version( + job_id = "job_id" + dataset = "dataset" + config = "config" + split = "split" + force = False @@ -99,0 +73,8 @@ def test_compare_major_version( + job_info={ + "job_id": job_id, + "type": test_processing_step.job_type, + "dataset": dataset, + "config": config, + "split": split, + "force": force, + }, @@ -101,4 +82 @@ def test_compare_major_version( - common_config=common_config, - queue_config=queue_config, - worker_config=worker_config, - version=worker_version, + common_config=CommonConfig(), @@ -115,3 +92,0 @@ def test_should_skip_job( - common_config: CommonConfig, - queue_config: QueueConfig, - worker_config: WorkerConfig, @@ -118,0 +94,5 @@ def test_should_skip_job( + job_id = "job_id" + dataset = "dataset" + config = "config" + split = "split" + force = False @@ -119,0 +100,8 @@ def test_should_skip_job( + job_info={ + "job_id": job_id, + "type": test_processing_step.job_type, + "dataset": dataset, + "config": config, + "split": split, + "force": force, + }, @@ -121,4 +109 @@ def test_should_skip_job( - common_config=common_config, - queue_config=queue_config, - worker_config=worker_config, - version="1.0.0", + common_config=CommonConfig(), @@ -125,0 +111,10 
@@ def test_should_skip_job( + assert worker.should_skip_job() is False + # we add an entry to the cache + worker.process() + assert worker.should_skip_job() is True + + +def test_check_type( + test_processing_step: ProcessingStep, +) -> None: + job_id = "job_id" @@ -129,10 +124,39 @@ def test_should_skip_job( - - assert worker.should_skip_job(dataset=dataset, config=config, split=split) is False - # we add an entry to the cache - worker.process(dataset=dataset, config=config, split=split) - assert worker.should_skip_job(dataset=dataset, config=config, split=split) is True - - assert worker.should_skip_job(dataset=dataset) is False - # we add an entry to the cache - worker.process(dataset=dataset) - assert worker.should_skip_job(dataset=dataset) is True + force = False + + job_type = f"not-{test_processing_step.job_type}" + with pytest.raises(ValueError): + DummyWorker( + job_info={ + "job_id": job_id, + "type": job_type, + "dataset": dataset, + "config": config, + "split": split, + "force": force, + }, + processing_step=test_processing_step, + common_config=CommonConfig(), + ) + + another_processing_step = ProcessingStep( + endpoint=f"not-{test_processing_step.endpoint}", + input_type="dataset", + requires=None, + required_by_dataset_viewer=False, + parent=None, + ancestors=[], + children=[], + ) + with pytest.raises(ValueError): + DummyWorker( + job_info={ + "job_id": job_id, + "type": test_processing_step.job_type, + "dataset": dataset, + "config": config, + "split": split, + "force": force, + }, + processing_step=another_processing_step, + common_config=CommonConfig(), + ) diff --git a/libs/libcommon/tests/test_worker_loop.py b/libs/libcommon/tests/test_worker_loop.py new file mode 100644 index 00000000..df6e8535 --- /dev/null +++ b/libs/libcommon/tests/test_worker_loop.py @@ -0,0 +1,63 @@ +from typing import Any, Mapping, Optional + +import pytest + +from libcommon.config import CommonConfig, QueueConfig, WorkerLoopConfig +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import Queue, _clean_queue_database +from libcommon.simple_cache import _clean_cache_database +from libcommon.worker import JobInfo, Worker, WorkerFactory +from libcommon.worker_loop import WorkerLoop + + [email protected](autouse=True) +def clean_mongo_database() -> None: + _clean_queue_database() + _clean_cache_database() + + +class DummyWorker(Worker): + # override get_dataset_git_revision to avoid making a request to the Hub + def get_dataset_git_revision(self) -> Optional[str]: + return "0.1.2" + + @staticmethod + def get_job_type() -> str: + return "/dummy" + + @staticmethod + def get_version() -> str: + return "1.0.1" + + def compute(self) -> Mapping[str, Any]: + return {"key": "value"} + + +class DummyWorkerFactory(WorkerFactory): + def __init__(self, processing_step: ProcessingStep) -> None: + self.common_config = CommonConfig() + self.processing_step = processing_step + + def _create_worker(self, job_info: JobInfo) -> Worker: + return DummyWorker(job_info=job_info, common_config=self.common_config, processing_step=self.processing_step) + + +def test_process_next_job( + test_processing_step: ProcessingStep, + queue_config: QueueConfig, +) -> None: + worker_factory = DummyWorkerFactory(processing_step=test_processing_step) + queue = Queue(type=test_processing_step.endpoint, max_jobs_per_namespace=queue_config.max_jobs_per_namespace) + worker_loop = WorkerLoop( + worker_factory=worker_factory, + queue=queue, + worker_loop_config=WorkerLoopConfig(), + ) + assert 
worker_loop.process_next_job() is False + dataset = "dataset" + config = "config" + split = "split" + worker_loop.queue.add_job(dataset=dataset, config=config, split=split) + assert worker_loop.queue.is_job_in_process(dataset=dataset, config=config, split=split) is True + assert worker_loop.process_next_job() is True + assert worker_loop.queue.is_job_in_process(dataset=dataset, config=config, split=split) is False diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index f2b25760..d575e00a 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -356 +356 @@ name = "libcommon" -version = "0.5.11" +version = "0.6.0" @@ -374 +374 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" @@ -999 +999 @@ python-versions = "3.9.15" -content-hash = "850ad555d794822b0f61abf3822eb23ce71d671e9443e61e2221dbddae691c4b" +content-hash = "24b868926cddaabeffd1ebc8940dc3988ca9b7e0c56145624162839bd2997f5f" @@ -1178 +1178 @@ libcommon = [ - {file = "libcommon-0.5.11-py3-none-any.whl", hash = "sha256:de1a625049e96df154ac31750137bf5e151a6083212c2950dc05b7919beda606"}, + {file = "libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index edd53ef3..fcb01148 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index c3c17e7f..7be2df98 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -23 +23 @@ def create_app() -> Starlette: - app_config = AppConfig() + app_config = AppConfig.from_env() @@ -46 +46 @@ def create_app() -> Starlette: - external_auth_url=app_config.admin.external_auth_url, + external_auth_url=app_config.external_auth_url, @@ -58 +58 @@ def create_app() -> Starlette: - external_auth_url=app_config.admin.external_auth_url, + external_auth_url=app_config.external_auth_url, @@ -72 +72 @@ def create_app() -> Starlette: - external_auth_url=app_config.admin.external_auth_url, + external_auth_url=app_config.external_auth_url, @@ -83 +83 @@ def create_app() -> Starlette: - external_auth_url=app_config.admin.external_auth_url, + external_auth_url=app_config.external_auth_url, @@ -96 +96 @@ def create_app() -> Starlette: - external_auth_url=app_config.admin.external_auth_url, + external_auth_url=app_config.external_auth_url, @@ -107 +107 @@ def start() -> None: - uvicorn_config = UvicornConfig() + uvicorn_config = UvicornConfig.from_env() diff --git a/services/admin/src/admin/config.py b/services/admin/src/admin/config.py index c5b1cfd4..d2410b3d 100644 --- a/services/admin/src/admin/config.py +++ b/services/admin/src/admin/config.py @@ -3,0 +4 @@ +from dataclasses import dataclass, field @@ -14,0 +16,3 @@ from libcommon.config import ( +ADMIN_UVICORN_HOSTNAME = "localhost" +ADMIN_UVICORN_NUM_WORKERS = 2 +ADMIN_UVICORN_PORT = 8000 @@ -15,0 +20,2 @@ from libcommon.config import ( + +@dataclass @@ -17,3 +23,3 @@ class UvicornConfig: - hostname: str - num_workers: int - port: int + hostname: str = ADMIN_UVICORN_HOSTNAME + num_workers: int = ADMIN_UVICORN_NUM_WORKERS + port: int = 
ADMIN_UVICORN_PORT @@ -21 +27,2 @@ class UvicornConfig: - def __init__(self): + @staticmethod + def from_env() -> "UvicornConfig": @@ -24,3 +31,11 @@ class UvicornConfig: - self.hostname = env.str(name="HOSTNAME", default="localhost") - self.num_workers = env.int(name="NUM_WORKERS", default=2) - self.port = env.int(name="PORT", default=8000) + return UvicornConfig( + hostname=env.str(name="HOSTNAME", default=ADMIN_UVICORN_HOSTNAME), + num_workers=env.int(name="NUM_WORKERS", default=ADMIN_UVICORN_NUM_WORKERS), + port=env.int(name="PORT", default=ADMIN_UVICORN_PORT), + ) + + +ADMIN_CACHE_REPORTS_NUM_RESULTS = 100 +ADMIN_HF_ORGANIZATION = None +ADMIN_HF_WHOAMI_PATH = "/api/whoami-v2" +ADMIN_MAX_AGE = 10 @@ -28,0 +44 @@ class UvicornConfig: +@dataclass @@ -30,5 +46,4 @@ class AdminConfig: - cache_reports_num_results: int - external_auth_url: str - hf_organization: Optional[str] - hf_whoami_path: str - max_age: int + cache_reports_num_results: int = ADMIN_CACHE_REPORTS_NUM_RESULTS + hf_organization: Optional[str] = ADMIN_HF_ORGANIZATION + hf_whoami_path: str = ADMIN_HF_WHOAMI_PATH + max_age: int = ADMIN_MAX_AGE @@ -36 +51,2 @@ class AdminConfig: - def __init__(self, hf_endpoint: str): + @staticmethod + def from_env() -> "AdminConfig": @@ -39,6 +55,8 @@ class AdminConfig: - hf_organization = env.str(name="HF_ORGANIZATION", default="") - self.hf_organization = None if hf_organization == "" else hf_organization - self.cache_reports_num_results = env.int(name="CACHE_REPORTS_NUM_RESULTS", default=100) - self.hf_whoami_path = env.str(name="HF_WHOAMI_PATH", default="/api/whoami-v2") - self.max_age = env.int(name="MAX_AGE", default=10) # 10 seconds - self.external_auth_url = None if self.hf_whoami_path is None else f"{hf_endpoint}{self.hf_whoami_path}" + return AdminConfig( + cache_reports_num_results=env.int( + name="CACHE_REPORTS_NUM_RESULTS", default=ADMIN_CACHE_REPORTS_NUM_RESULTS + ), + hf_organization=env.str(name="HF_ORGANIZATION", default=ADMIN_HF_ORGANIZATION), + hf_whoami_path=env.str(name="HF_WHOAMI_PATH", default=ADMIN_HF_WHOAMI_PATH), + max_age=env.int(name="MAX_AGE", default=ADMIN_MAX_AGE), + ) @@ -46,0 +65 @@ class AdminConfig: +@dataclass @@ -48,8 +67,14 @@ class AppConfig: - admin: AdminConfig - assets: AssetsConfig - cache: CacheConfig - common: CommonConfig - processing_graph: ProcessingGraphConfig - queue: QueueConfig - - def __init__(self): + admin: AdminConfig = field(default_factory=AdminConfig) + assets: AssetsConfig = field(default_factory=AssetsConfig) + cache: CacheConfig = field(default_factory=CacheConfig) + common: CommonConfig = field(default_factory=CommonConfig) + processing_graph: ProcessingGraphConfig = field(default_factory=ProcessingGraphConfig) + queue: QueueConfig = field(default_factory=QueueConfig) + + def __post_init__(self): + self.external_auth_url = ( + None if self.admin.hf_whoami_path is None else f"{self.common.hf_endpoint}{self.admin.hf_whoami_path}" + ) + + @staticmethod + def from_env() -> "AppConfig": @@ -57,6 +82,8 @@ class AppConfig: - self.common = CommonConfig() - self.assets = AssetsConfig() - self.cache = CacheConfig() - self.processing_graph = ProcessingGraphConfig() - self.queue = QueueConfig() - self.admin = AdminConfig(hf_endpoint=self.common.hf_endpoint) + return AppConfig( + common=CommonConfig.from_env(), + assets=AssetsConfig.from_env(), + cache=CacheConfig.from_env(), + processing_graph=ProcessingGraphConfig.from_env(), + queue=QueueConfig.from_env(), + admin=AdminConfig.from_env(), + ) diff --git a/services/admin/tests/conftest.py 
b/services/admin/tests/conftest.py index 128e5a56..d515e78d 100644 --- a/services/admin/tests/conftest.py +++ b/services/admin/tests/conftest.py @@ -29 +29 @@ def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: - app_config = AppConfig() + app_config = AppConfig.from_env() diff --git a/services/admin/tests/test_app_real.py b/services/admin/tests/test_app_real.py index f69d66c9..06d860a5 100644 --- a/services/admin/tests/test_app_real.py +++ b/services/admin/tests/test_app_real.py @@ -32 +32 @@ def real_app_config(real_monkeypatch: MonkeyPatch) -> AppConfig: - app_config = AppConfig() + app_config = AppConfig.from_env() diff --git a/services/api/README.md b/services/api/README.md index 8ecbc695..1fe75f77 100644 --- a/services/api/README.md +++ b/services/api/README.md @@ -42,0 +43 @@ See https://huggingface.co/docs/datasets-server +- /parquet: list the parquet files auto-converted for a dataset diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 2d596907..814cead1 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -372 +372 @@ name = "libcommon" -version = "0.5.10" +version = "0.6.0" @@ -390 +390 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" @@ -1033 +1033 @@ python-versions = "3.9.15" -content-hash = "0288367cb9b868444331cdbdf2eb6095c5d939a475811c612e30f4cdb7aa8785" +content-hash = "7a0e0c8fb2bb502ec2951099fa3eb74fc3b5e3534e63b60390ad29784cb9aec7" @@ -1216 +1216 @@ libcommon = [ - {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, + {file = "libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index b857ffa4..4c73d5e3 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } diff --git a/services/api/src/api/app.py b/services/api/src/api/app.py index 6cf24221..6ada8330 100644 --- a/services/api/src/api/app.py +++ b/services/api/src/api/app.py @@ -23 +23 @@ def create_app() -> Starlette: - app_config = AppConfig() + app_config = AppConfig.from_env() @@ -45 +45 @@ def create_app() -> Starlette: - external_auth_url=app_config.api.external_auth_url, + external_auth_url=app_config.external_auth_url, @@ -61 +61 @@ def create_app() -> Starlette: - external_auth_url=app_config.api.external_auth_url, + external_auth_url=app_config.external_auth_url, @@ -90 +90 @@ def start() -> None: - uvicorn_config = UvicornConfig() + uvicorn_config = UvicornConfig.from_env() diff --git a/services/api/src/api/config.py b/services/api/src/api/config.py index 928458bc..80ef9a99 100644 --- a/services/api/src/api/config.py +++ b/services/api/src/api/config.py @@ -4 +4 @@ -from typing import Optional +from dataclasses import dataclass, field @@ -13,0 +14,3 @@ from libcommon.config import ( +API_UVICORN_HOSTNAME = "localhost" +API_UVICORN_NUM_WORKERS = 2 +API_UVICORN_PORT = 8000 @@ -14,0 +18,2 @@ from libcommon.config import ( + +@dataclass @@ -16,3 +21,3 @@ class UvicornConfig: - hostname: str - num_workers: int - port: int + hostname: str = API_UVICORN_HOSTNAME + num_workers: int = 
API_UVICORN_NUM_WORKERS + port: int = API_UVICORN_PORT @@ -20 +25,2 @@ class UvicornConfig: - def __init__(self): + @staticmethod + def from_env() -> "UvicornConfig": @@ -23,3 +29,6 @@ class UvicornConfig: - self.hostname = env.str(name="HOSTNAME", default="localhost") - self.num_workers = env.int(name="NUM_WORKERS", default=2) - self.port = env.int(name="PORT", default=8000) + return UvicornConfig( + hostname=env.str(name="HOSTNAME", default=API_UVICORN_HOSTNAME), + num_workers=env.int(name="NUM_WORKERS", default=API_UVICORN_NUM_WORKERS), + port=env.int(name="PORT", default=API_UVICORN_PORT), + ) + @@ -26,0 +36,3 @@ class UvicornConfig: +API_HF_AUTH_PATH = "/api/datasets/%s/auth-check" +API_MAX_AGE_LONG = 120 # 2 minutes +API_MAX_AGE_SHORT = 10 # 10 seconds @@ -27,0 +40,2 @@ class UvicornConfig: + +@dataclass @@ -29,4 +43,3 @@ class ApiConfig: - external_auth_url: Optional[str] - hf_auth_path: str - max_age_long: int - max_age_short: int + hf_auth_path: str = API_HF_AUTH_PATH + max_age_long: int = API_MAX_AGE_LONG + max_age_short: int = API_MAX_AGE_SHORT @@ -34 +47,2 @@ class ApiConfig: - def __init__(self, hf_endpoint: str): + @staticmethod + def from_env() -> "ApiConfig": @@ -37,4 +51,5 @@ class ApiConfig: - self.hf_auth_path = env.str(name="HF_AUTH_PATH", default="/api/datasets/%s/auth-check") - self.max_age_long = env.int(name="MAX_AGE_LONG", default=120) # 2 minutes - self.max_age_short = env.int(name="MAX_AGE_SHORT", default=10) # 10 seconds - self.external_auth_url = None if self.hf_auth_path is None else f"{hf_endpoint}{self.hf_auth_path}" + return ApiConfig( + hf_auth_path=env.str(name="HF_AUTH_PATH", default=API_HF_AUTH_PATH), + max_age_long=env.int(name="MAX_AGE_LONG", default=API_MAX_AGE_LONG), + max_age_short=env.int(name="MAX_AGE_SHORT", default=API_MAX_AGE_SHORT), + ) @@ -42,0 +58 @@ class ApiConfig: +@dataclass @@ -44,5 +60,10 @@ class AppConfig: - api: ApiConfig - cache: CacheConfig - common: CommonConfig - queue: QueueConfig - processing_graph: ProcessingGraphConfig + api: ApiConfig = field(default_factory=ApiConfig) + cache: CacheConfig = field(default_factory=CacheConfig) + common: CommonConfig = field(default_factory=CommonConfig) + queue: QueueConfig = field(default_factory=QueueConfig) + processing_graph: ProcessingGraphConfig = field(default_factory=ProcessingGraphConfig) + + def __post_init__(self): + self.external_auth_url = ( + None if self.api.hf_auth_path is None else f"{self.common.hf_endpoint}{self.api.hf_auth_path}" + ) @@ -50 +71,2 @@ class AppConfig: - def __init__(self): + @staticmethod + def from_env() -> "AppConfig": @@ -52,5 +74,7 @@ class AppConfig: - self.common = CommonConfig() - self.cache = CacheConfig() - self.processing_graph = ProcessingGraphConfig() - self.queue = QueueConfig() - self.api = ApiConfig(hf_endpoint=self.common.hf_endpoint) + return AppConfig( + common=CommonConfig.from_env(), + cache=CacheConfig.from_env(), + processing_graph=ProcessingGraphConfig.from_env(), + queue=QueueConfig.from_env(), + api=ApiConfig.from_env(), + ) diff --git a/services/api/tests/conftest.py b/services/api/tests/conftest.py index db3b83b1..708ef6c6 100644 --- a/services/api/tests/conftest.py +++ b/services/api/tests/conftest.py @@ -26 +26 @@ def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: - app_config = AppConfig() + app_config = AppConfig.from_env() @@ -34 +34 @@ def uvicorn_config(monkeypatch_session: MonkeyPatch): - return UvicornConfig() + return UvicornConfig.from_env() diff --git a/services/api/tests/test_app_real.py 
b/services/api/tests/test_app_real.py index 28958063..721de040 100644 --- a/services/api/tests/test_app_real.py +++ b/services/api/tests/test_app_real.py @@ -33 +33 @@ def real_app_config(real_monkeypatch: MonkeyPatch) -> AppConfig: - app_config = AppConfig() + app_config = AppConfig.from_env() diff --git a/tools/docker-compose-base.yml b/tools/docker-compose-base.yml index 1613fbf2..8d98fc0e 100644 --- a/tools/docker-compose-base.yml +++ b/tools/docker-compose-base.yml @@ -19,3 +19,4 @@ services: - WORKER_MAX_LOAD_PCT: ${WORKER_MAX_LOAD_PCT-70} - WORKER_MAX_MEMORY_PCT: ${WORKER_MAX_MEMORY_PCT-80} - WORKER_SLEEP_SECONDS: ${WORKER_SLEEP_SECONDS-15} + WORKER_LOOP_MAX_DISK_USAGE_PCT: ${WORKER_LOOP_MAX_DISK_USAGE_PCT-90} + WORKER_LOOP_MAX_LOAD_PCT: ${WORKER_LOOP_MAX_LOAD_PCT-70} + WORKER_LOOP_MAX_MEMORY_PCT: ${WORKER_LOOP_MAX_MEMORY_PCT-80} + WORKER_LOOP_SLEEP_SECONDS: ${WORKER_LOOP_SLEEP_SECONDS-15} diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index ffcc7a8e..d25532c4 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -109,0 +110,2 @@ services: + WORKER_LOOP_STORAGE_PATHS: ${ASSETS_STORAGE_DIRECTORY-/assets} + # ^ note: the datasets cache is automatically added, so no need to add it here diff --git a/workers/datasets_based/README.md b/workers/datasets_based/README.md index a22cee6b..893722fc 100644 --- a/workers/datasets_based/README.md +++ b/workers/datasets_based/README.md @@ -13 +13 @@ Set environment variables to configure the datasets-based worker (`DATASETS_BASE -- `DATASETS_BASED_ENDPOINT`: the endpoint on which the worker will work (pre-compute and cache the response). The same worker is used for different endpoints to reuse shared code and dependencies. But at runtime, the worker is assigned only one endpoint. Allowed values: `/splits`, `/first_rows`, and ` /parquet``. Defaults to `/splits`. +- `DATASETS_BASED_ENDPOINT`: the endpoint on which the worker will work (pre-compute and cache the response). The same worker is used for different endpoints to reuse shared code and dependencies. But at runtime, the worker is assigned only one endpoint. Allowed values: `/splits`, `/first-rows`, and `/parquet`. Defaults to `/splits`. @@ -19,0 +20,2 @@ Also, set the modules cache configuration for the datasets-based worker. See [.. +Note that both directories will be appended to `WORKER_LOOP_STORAGE_PATHS` (see [../../libs/libcommon/README.md](../../libs/libcommon/README.md)), so that the workers are put on hold when the disk is full. + @@ -25,0 +28,2 @@ Numba requires setting the `NUMBA_CACHE_DIR` environment variable to a writable +Note that this directory will be appended to `WORKER_LOOP_STORAGE_PATHS` (see [../../libs/libcommon/README.md](../../libs/libcommon/README.md)), so that the workers are put on hold when the disk is full.
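+
+As an illustration, here is a minimal sketch of how these directories end up in the storage paths checked by the worker loop. The cache paths below are hypothetical, used only for the example:
+
+```python
+import os
+
+# hypothetical cache locations, set before loading the configuration
+os.environ["DATASETS_BASED_HF_DATASETS_CACHE"] = "/data/datasets-cache"
+os.environ["NUMBA_CACHE_DIR"] = "/data/numba-cache"
+
+from datasets_based.config import AppConfig
+
+app_config = AppConfig.from_env()
+# the datasets cache, the modules cache and the numba cache are appended automatically
+print(app_config.worker_loop.storage_paths)
+```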
+ diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index 21c51fef..08cc79e0 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -891 +891 @@ name = "libcommon" -version = "0.5.10" +version = "0.6.0" @@ -909 +909 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl" @@ -2460 +2460 @@ python-versions = "3.9.15" -content-hash = "61028ae835ec768d358c2ba4cf82ac6122e438b9a5fe4ff4e755483dcb178eba" +content-hash = "9732f429697818b1b3b3dca094f90c50eb148d747a00c0f43ee8bd9255bdbf39" @@ -3367 +3367 @@ libcommon = [ - {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, + {file = "libcommon-0.6.0-py3-none-any.whl", hash = "sha256:88e136a35ce22164fc29c0a37dbdf84051fae86884074a605c7455e5e7d2d704"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 5ffd565b..0a7a9eb7 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.6.0-py3-none-any.whl", develop = false } diff --git a/workers/datasets_based/src/datasets_based/config.py b/workers/datasets_based/src/datasets_based/config.py index 96656d20..2e0a9970 100644 --- a/workers/datasets_based/src/datasets_based/config.py +++ b/workers/datasets_based/src/datasets_based/config.py @@ -3,0 +4 @@ +from dataclasses import dataclass, field @@ -5 +6 @@ from pathlib import Path -from typing import List, Optional +from typing import List, Optional, Union @@ -16 +17 @@ from libcommon.config import ( - WorkerConfig, + WorkerLoopConfig, @@ -18,0 +20,2 @@ from libcommon.config import ( +DATASETS_BASED_ENDPOINT = "/splits" +DATASETS_BASED_HF_DATASETS_CACHE = None @@ -20,4 +22,0 @@ from libcommon.config import ( -class DatasetsBasedConfig: - endpoint: str - hf_datasets_cache: Path - max_disk_usage_percent = 90 # hard-coded, not configurable @@ -25,6 +24,4 @@ class DatasetsBasedConfig: - def __init__(self): - env = Env(expand_vars=True) - with env.prefixed("DATASETS_BASED_"): - self.endpoint = env.str(name="ENDPOINT", default="/splits") - self._hf_datasets_cache = env.str(name="HF_DATASETS_CACHE", default=None) - self.setup() +@dataclass +class DatasetsBasedConfig: + endpoint: str = DATASETS_BASED_ENDPOINT + _hf_datasets_cache: Union[str, Path, None] = DATASETS_BASED_HF_DATASETS_CACHE @@ -32 +29 @@ class DatasetsBasedConfig: - def setup(self) -> None: + def __post_init__(self) -> None: @@ -36,0 +34,16 @@ class DatasetsBasedConfig: + @staticmethod + def from_env() -> "DatasetsBasedConfig": + env = Env(expand_vars=True) + with env.prefixed("DATASETS_BASED_"): + return DatasetsBasedConfig( + endpoint=env.str(name="ENDPOINT", default=DATASETS_BASED_ENDPOINT), + _hf_datasets_cache=env.str(name="HF_DATASETS_CACHE", default=DATASETS_BASED_HF_DATASETS_CACHE), + ) + + +FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE = 100_000_000 +FIRST_ROWS_MAX_BYTES = 1_000_000 +FIRST_ROWS_MAX_NUMBER = 100 +FIRST_ROWS_CELL_MIN_BYTES = 100 +FIRST_ROWS_MIN_NUMBER = 10 + @@ -37,0 +51 @@ class DatasetsBasedConfig: +@dataclass @@ -39,8 +53,9 @@ class FirstRowsConfig: - assets: AssetsConfig - fallback_max_dataset_size: int - max_bytes: int - max_number: int - min_cell_bytes: int - min_number: int 
- - def __init__(self): + assets: AssetsConfig = field(default_factory=AssetsConfig) + fallback_max_dataset_size: int = FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE + max_bytes: int = FIRST_ROWS_MAX_BYTES + max_number: int = FIRST_ROWS_MAX_NUMBER + min_cell_bytes: int = FIRST_ROWS_CELL_MIN_BYTES + min_number: int = FIRST_ROWS_MIN_NUMBER + + @staticmethod + def from_env() -> "FirstRowsConfig": @@ -49,6 +64,10 @@ class FirstRowsConfig: - self.fallback_max_dataset_size = env.int(name="FALLBACK_MAX_DATASET_SIZE", default=100_000_000) - self.max_bytes = env.int(name="MAX_BYTES", default=1_000_000) - self.max_number = env.int(name="MAX_NUMBER", default=100) - self.min_cell_bytes = env.int(name="CELL_MIN_BYTES", default=100) - self.min_number = env.int(name="MIN_NUMBER", default=10) - self.assets = AssetsConfig() + return FirstRowsConfig( + assets=AssetsConfig.from_env(), + fallback_max_dataset_size=env.int( + name="FALLBACK_MAX_DATASET_SIZE", default=FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE + ), + max_bytes=env.int(name="MAX_BYTES", default=FIRST_ROWS_MAX_BYTES), + max_number=env.int(name="MAX_NUMBER", default=FIRST_ROWS_MAX_NUMBER), + min_cell_bytes=env.int(name="CELL_MIN_BYTES", default=FIRST_ROWS_CELL_MIN_BYTES), + min_number=env.int(name="MIN_NUMBER", default=FIRST_ROWS_MIN_NUMBER), + ) @@ -56,0 +76,13 @@ class FirstRowsConfig: +PARQUET_COMMIT_MESSAGE = "Update parquet files" +PARQUET_COMMITTER_HF_TOKEN = None +PARQUET_MAX_DATASET_SIZE = 100_000_000 +PARQUET_SOURCE_REVISION = "main" +PARQUET_TARGET_REVISION = "refs/convert/parquet" +PARQUET_URL_TEMPLATE = "/datasets/%s/resolve/%s/%s" + + +def get_empty_str_list() -> List[str]: + return [] + + +@dataclass @@ -58,10 +90,11 @@ class ParquetConfig: - blocked_datasets: List[str] - supported_datasets: List[str] - commit_message: str - committer_hf_token: Optional[str] - max_dataset_size: int - source_revision: str - target_revision: str - url_template: str - - def __init__(self): + blocked_datasets: List[str] = field(default_factory=get_empty_str_list) + supported_datasets: List[str] = field(default_factory=get_empty_str_list) + commit_message: str = PARQUET_COMMIT_MESSAGE + committer_hf_token: Optional[str] = PARQUET_COMMITTER_HF_TOKEN + max_dataset_size: int = PARQUET_MAX_DATASET_SIZE + source_revision: str = PARQUET_SOURCE_REVISION + target_revision: str = PARQUET_TARGET_REVISION + url_template: str = PARQUET_URL_TEMPLATE + + @staticmethod + def from_env() -> "ParquetConfig": @@ -70,10 +103,13 @@ class ParquetConfig: - self.blocked_datasets = env.list(name="BLOCKED_DATASETS", default=[]) - self.supported_datasets = env.list(name="SUPPORTED_DATASETS", default=[]) - self.commit_message = env.str(name="COMMIT_MESSAGE", default="Update parquet files") - self.committer_hf_token = env.str(name="COMMITTER_HF_TOKEN", default=None) - self.max_dataset_size = env.int(name="MAX_DATASET_SIZE", default=100_000_000) - self.source_revision = env.str(name="SOURCE_REVISION", default="main") - self.target_revision = env.str(name="TARGET_REVISION", default="refs/convert/parquet") - self.url_template = env.str(name="URL_TEMPLATE", default="/datasets/%s/resolve/%s/%s") - - + return ParquetConfig( + blocked_datasets=env.list(name="BLOCKED_DATASETS", default=get_empty_str_list()), + supported_datasets=env.list(name="SUPPORTED_DATASETS", default=get_empty_str_list()), + commit_message=env.str(name="COMMIT_MESSAGE", default=PARQUET_COMMIT_MESSAGE), + committer_hf_token=env.str(name="COMMITTER_HF_TOKEN", default=PARQUET_COMMITTER_HF_TOKEN), + 
max_dataset_size=env.int(name="MAX_DATASET_SIZE", default=PARQUET_MAX_DATASET_SIZE), + source_revision=env.str(name="SOURCE_REVISION", default=PARQUET_SOURCE_REVISION), + target_revision=env.str(name="TARGET_REVISION", default=PARQUET_TARGET_REVISION), + url_template=env.str(name="URL_TEMPLATE", default=PARQUET_URL_TEMPLATE), + ) + + +@dataclass @@ -81,18 +117,8 @@ class AppConfig: - cache: CacheConfig - common: CommonConfig - datasets_based: DatasetsBasedConfig - processing_graph: ProcessingGraphConfig - queue: QueueConfig - worker: WorkerConfig - - def __init__(self): - # First process the common configuration to setup the logging - self.common = CommonConfig() - self.cache = CacheConfig() - self.datasets_based = DatasetsBasedConfig() - self.processing_graph = ProcessingGraphConfig() - self.queue = QueueConfig() - self.worker = WorkerConfig() - self.setup() - - def setup(self): + cache: CacheConfig = field(default_factory=CacheConfig) + common: CommonConfig = field(default_factory=CommonConfig) + datasets_based: DatasetsBasedConfig = field(default_factory=DatasetsBasedConfig) + processing_graph: ProcessingGraphConfig = field(default_factory=ProcessingGraphConfig) + queue: QueueConfig = field(default_factory=QueueConfig) + worker_loop: WorkerLoopConfig = field(default_factory=WorkerLoopConfig) + + def __post_init__(self): @@ -109,0 +136,20 @@ class AppConfig: + + # Add the datasets and numba cache paths to the list of storage paths, to ensure the disk is not full + env = Env(expand_vars=True) + numba_path = env.str(name="NUMBA_CACHE_DIR", default=None) + additional_paths = {str(self.datasets_based.hf_datasets_cache), str(datasets.config.HF_MODULES_CACHE)} + if numba_path: + additional_paths.add(numba_path) + self.worker_loop.storage_paths = list(set(self.worker_loop.storage_paths).union(additional_paths)) + + @staticmethod + def from_env() -> "AppConfig": + return AppConfig( + # First process the common configuration to setup the logging + common=CommonConfig.from_env(), + cache=CacheConfig.from_env(), + datasets_based=DatasetsBasedConfig.from_env(), + processing_graph=ProcessingGraphConfig.from_env(), + queue=QueueConfig.from_env(), + worker_loop=WorkerLoopConfig.from_env(), + ) diff --git a/workers/datasets_based/src/datasets_based/main.py b/workers/datasets_based/src/datasets_based/main.py index 0020daae..219f505e 100644 --- a/workers/datasets_based/src/datasets_based/main.py +++ b/workers/datasets_based/src/datasets_based/main.py @@ -3,0 +4,3 @@ +from libcommon.queue import Queue +from libcommon.worker_loop import WorkerLoop + @@ -5 +8 @@ from datasets_based.config import AppConfig -from datasets_based.worker import get_worker +from datasets_based.worker_factory import DatasetBasedWorkerFactory @@ -8,3 +11,10 @@ if __name__ == "__main__": - app_config = AppConfig() - worker = get_worker(app_config) - worker.loop() + app_config = AppConfig.from_env() + processing_step = app_config.processing_graph.graph.get_step(app_config.datasets_based.endpoint) + worker_factory = DatasetBasedWorkerFactory(app_config=app_config) + queue = Queue(type=processing_step.job_type, max_jobs_per_namespace=app_config.queue.max_jobs_per_namespace) + worker_loop = WorkerLoop( + queue=queue, + worker_factory=worker_factory, + worker_loop_config=app_config.worker_loop, + ) + worker_loop.loop() diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py deleted file mode 100644 index 18fcb382..00000000 --- 
a/workers/datasets_based/src/datasets_based/worker.py +++ /dev/null @@ -1,18 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from datasets_based.config import AppConfig -from datasets_based.workers import DatasetsBasedWorker, worker_class_by_endpoint - - -def get_worker(app_config: AppConfig) -> DatasetsBasedWorker: - """Get the worker for the current environment.""" - - endpoint = app_config.datasets_based.endpoint - try: - worker = worker_class_by_endpoint[endpoint](app_config=app_config) - except KeyError as e: - raise ValueError( - f"Unknown worker name '{endpoint}'. Available workers are: {list(worker_class_by_endpoint.keys())}" - ) from e - return worker diff --git a/workers/datasets_based/src/datasets_based/worker_factory.py b/workers/datasets_based/src/datasets_based/worker_factory.py new file mode 100644 index 00000000..a032f9a7 --- /dev/null +++ b/workers/datasets_based/src/datasets_based/worker_factory.py @@ -0,0 +1,34 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from libcommon.worker import JobInfo, Worker, WorkerFactory + +from datasets_based.config import AppConfig, FirstRowsConfig, ParquetConfig +from datasets_based.workers.first_rows import FirstRowsWorker +from datasets_based.workers.parquet import ParquetWorker +from datasets_based.workers.splits import SplitsWorker + + +class DatasetBasedWorkerFactory(WorkerFactory): + def __init__(self, app_config: AppConfig) -> None: + self.app_config = app_config + + def _create_worker(self, job_info: JobInfo) -> Worker: + job_type = job_info["type"] + if job_type == SplitsWorker.get_job_type(): + return SplitsWorker(job_info=job_info, app_config=self.app_config) + elif job_type == FirstRowsWorker.get_job_type(): + return FirstRowsWorker( + job_info=job_info, app_config=self.app_config, first_rows_config=FirstRowsConfig.from_env() + ) + elif job_type == ParquetWorker.get_job_type(): + return ParquetWorker( + job_info=job_info, app_config=self.app_config, parquet_config=ParquetConfig.from_env() + ) + else: + supported_job_types = [ + SplitsWorker.get_job_type(), + FirstRowsWorker.get_job_type(), + ParquetWorker.get_job_type(), + ] + raise ValueError(f"Unsupported job type: '{job_type}'. 
The supported job types are: {supported_job_types}") diff --git a/workers/datasets_based/src/datasets_based/workers/__init__.py b/workers/datasets_based/src/datasets_based/workers/__init__.py index 71632e41..1e9d0c5a 100644 --- a/workers/datasets_based/src/datasets_based/workers/__init__.py +++ b/workers/datasets_based/src/datasets_based/workers/__init__.py @@ -3,13 +2,0 @@ - -from typing import List, Type - -from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker -from datasets_based.workers.first_rows import FirstRowsWorker -from datasets_based.workers.parquet import ParquetWorker -from datasets_based.workers.splits import SplitsWorker - -worker_classes: List[Type[DatasetsBasedWorker]] = [FirstRowsWorker, ParquetWorker, SplitsWorker] -worker_class_by_endpoint = {worker_class.get_endpoint(): worker_class for worker_class in worker_classes} - -# explicit re-export -__all__ = ["DatasetsBasedWorker"] diff --git a/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py index b8f63ea9..31996058 100644 --- a/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py +++ b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py @@ -7 +6,0 @@ import re -from abc import ABC, abstractmethod @@ -15,2 +14 @@ from libcommon.storage import init_dir, remove_dir -from libcommon.worker import Worker -from psutil import disk_usage +from libcommon.worker import JobInfo, Worker @@ -21 +19 @@ from datasets_based.config import AppConfig, DatasetsBasedConfig -class DatasetsBasedWorker(Worker, ABC): +class DatasetsBasedWorker(Worker): @@ -26,5 +23,0 @@ class DatasetsBasedWorker(Worker, ABC): - @staticmethod - @abstractmethod - def get_endpoint() -> str: - pass - @@ -36,12 +29,2 @@ class DatasetsBasedWorker(Worker, ABC): - def __init__(self, app_config: AppConfig, version: str = "1.0.0"): - super().__init__( - processing_step=app_config.processing_graph.graph.get_step(self.get_endpoint()), - # ^ raises if the step is not found - common_config=app_config.common, - queue_config=app_config.queue, - worker_config=app_config.worker, - version=version, - ) - self.datasets_based_config = app_config.datasets_based - - def has_storage(self) -> bool: + def __init__(self, job_info: JobInfo, app_config: AppConfig) -> None: + job_type = job_info["type"] @@ -49,5 +32,8 @@ class DatasetsBasedWorker(Worker, ABC): - usage = disk_usage(str(self.datasets_based_config.hf_datasets_cache)) - return usage.percent < self.datasets_based_config.max_disk_usage_percent - except Exception: - # if we can't get the disk usage, we let the process continue - return True + processing_step = app_config.processing_graph.graph.get_step_by_job_type(job_type) + except ValueError as e: + raise ValueError( + f"Unsupported job type: '{job_type}'. 
The job types declared in the processing graph are:" + f" {[step.job_type for step in app_config.processing_graph.graph.steps.values()]}" + ) from e + super().__init__(job_info=job_info, common_config=app_config.common, processing_step=processing_step) + self.datasets_based_config = app_config.datasets_based @@ -55,8 +41 @@ class DatasetsBasedWorker(Worker, ABC): - def get_cache_subdirectory( - self, - date: datetime, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> str: + def get_cache_subdirectory(self, date: datetime) -> str: @@ -64 +43 @@ class DatasetsBasedWorker(Worker, ABC): - payload = (date_str, self.get_endpoint(), dataset, config, split, force) + payload = (date_str, self.get_job_type(), self.dataset, self.config, self.split, self.force) @@ -66 +45 @@ class DatasetsBasedWorker(Worker, ABC): - prefix = f"{date_str}-{self.get_endpoint()}-{dataset}"[:64] + prefix = f"{date_str}-{self.get_job_type()}-{self.dataset}"[:64] @@ -89,6 +68,2 @@ class DatasetsBasedWorker(Worker, ABC): - def set_cache( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> None: - cache_subdirectory = self.get_cache_subdirectory( - date=datetime.now(), dataset=dataset, config=config, split=split, force=force - ) + def set_cache(self) -> None: + cache_subdirectory = self.get_cache_subdirectory(date=datetime.now()) @@ -100,4 +75,2 @@ class DatasetsBasedWorker(Worker, ABC): - def pre_compute( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> None: - self.set_cache(dataset=dataset, config=config, split=split, force=force) + def pre_compute(self) -> None: + self.set_cache() @@ -105,3 +78 @@ class DatasetsBasedWorker(Worker, ABC): - def post_compute( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> None: + def post_compute(self) -> None: diff --git a/workers/datasets_based/src/datasets_based/workers/first_rows.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py index 241bd22f..4811a9d2 100644 --- a/workers/datasets_based/src/datasets_based/workers/first_rows.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -23 +23 @@ from libcommon.utils import orjson_dumps -from libcommon.worker import ConfigNotFoundError, SplitNotFoundError +from libcommon.worker import ConfigNotFoundError, JobInfo, SplitNotFoundError @@ -550,3 +549,0 @@ def compute_first_rows_response( -FIRST_ROWS_VERSION = "2.0.0" - - @@ -557 +554 @@ class FirstRowsWorker(DatasetsBasedWorker): - def get_endpoint() -> str: + def get_job_type() -> str: @@ -560,3 +557,3 @@ class FirstRowsWorker(DatasetsBasedWorker): - def __init__(self, app_config: AppConfig): - super().__init__(version=FIRST_ROWS_VERSION, app_config=app_config) - self.first_rows_config = FirstRowsConfig() + @staticmethod + def get_version() -> str: + return "2.0.0" @@ -564,8 +561,6 @@ class FirstRowsWorker(DatasetsBasedWorker): - def compute( - self, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> Mapping[str, Any]: - if config is None or split is None: + def __init__(self, job_info: JobInfo, app_config: AppConfig, first_rows_config: FirstRowsConfig) -> None: + super().__init__(job_info=job_info, app_config=app_config) + self.first_rows_config = first_rows_config + + def compute(self) -> Mapping[str, Any]: + if self.config is None or self.split is None: @@ -574,3 
+569,3 @@ class FirstRowsWorker(DatasetsBasedWorker): - dataset=dataset, - config=config, - split=split, + dataset=self.dataset, + config=self.config, + split=self.split, diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index 82efa4b8..d5e22df4 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -27 +27 @@ from libcommon.exceptions import CustomError -from libcommon.worker import DatasetNotFoundError +from libcommon.worker import DatasetNotFoundError, JobInfo @@ -569,3 +568,0 @@ def compute_parquet_response( -PARQUET_VERSION = "2.0.0" - - @@ -576 +573 @@ class ParquetWorker(DatasetsBasedWorker): - def get_endpoint() -> str: + def get_job_type() -> str: @@ -579,3 +576,3 @@ class ParquetWorker(DatasetsBasedWorker): - def __init__(self, app_config: AppConfig): - super().__init__(version=PARQUET_VERSION, app_config=app_config) - self.parquet_config = ParquetConfig() + @staticmethod + def get_version() -> str: + return "2.0.0" @@ -583,7 +580,5 @@ class ParquetWorker(DatasetsBasedWorker): - def compute( - self, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> Mapping[str, Any]: + def __init__(self, job_info: JobInfo, app_config: AppConfig, parquet_config: ParquetConfig) -> None: + super().__init__(job_info=job_info, app_config=app_config) + self.parquet_config = parquet_config + + def compute(self) -> Mapping[str, Any]: @@ -591 +586 @@ class ParquetWorker(DatasetsBasedWorker): - dataset=dataset, + dataset=self.dataset, diff --git a/workers/datasets_based/src/datasets_based/workers/splits.py b/workers/datasets_based/src/datasets_based/workers/splits.py index 55f74249..9cbb0187 100644 --- a/workers/datasets_based/src/datasets_based/workers/splits.py +++ b/workers/datasets_based/src/datasets_based/workers/splits.py @@ -15,0 +16 @@ from libcommon.exceptions import CustomError +from libcommon.queue import Queue @@ -17 +17,0 @@ from libcommon.simple_cache import delete_response, get_dataset_response_ids -from libcommon.worker import Queue @@ -19 +18,0 @@ from libcommon.worker import Queue -from datasets_based.config import AppConfig @@ -165,3 +163,0 @@ def compute_splits_response( -SPLITS_VERSION = "2.0.0" - - @@ -170 +166 @@ class SplitsWorker(DatasetsBasedWorker): - def get_endpoint() -> str: + def get_job_type() -> str: @@ -173,2 +169,3 @@ class SplitsWorker(DatasetsBasedWorker): - def __init__(self, app_config: AppConfig): - super().__init__(version=SPLITS_VERSION, app_config=app_config) + @staticmethod + def get_version() -> str: + return "2.0.0" @@ -176,8 +173,2 @@ class SplitsWorker(DatasetsBasedWorker): - def compute( - self, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> Mapping[str, Any]: - content = compute_splits_response(dataset=dataset, hf_token=self.common_config.hf_token) + def compute(self) -> Mapping[str, Any]: + content = compute_splits_response(dataset=self.dataset, hf_token=self.common_config.hf_token) @@ -188 +179,3 @@ class SplitsWorker(DatasetsBasedWorker): - Queue(type=step.job_type).add_job(dataset=dataset, config=config, split=split, force=force) + Queue(type=step.job_type).add_job( + dataset=self.dataset, config=self.config, split=self.split, force=self.force + ) @@ -193 +186 @@ class SplitsWorker(DatasetsBasedWorker): - for s in get_dataset_response_ids(dataset=dataset) + for 
s in get_dataset_response_ids(dataset=self.dataset) @@ -201 +194 @@ class SplitsWorker(DatasetsBasedWorker): - f" splits of dataset={dataset}" + f" splits of dataset={self.dataset}" @@ -206,2 +199,2 @@ class SplitsWorker(DatasetsBasedWorker): - Queue(type=step.job_type).add_job(dataset=d, config=c, split=s, force=force) - logging.debug(f"{len(new_splits)} {step.endpoint} jobs added for the splits of dataset={dataset}") + Queue(type=step.job_type).add_job(dataset=d, config=c, split=s, force=self.force) + logging.debug(f"{len(new_splits)} {step.job_type} jobs added for the splits of dataset={self.dataset}") diff --git a/workers/datasets_based/tests/conftest.py b/workers/datasets_based/tests/conftest.py index f6b56567..78bcc47e 100644 --- a/workers/datasets_based/tests/conftest.py +++ b/workers/datasets_based/tests/conftest.py @@ -11 +11 @@ from pytest import MonkeyPatch, fixture -from datasets_based.config import AppConfig, FirstRowsConfig, ParquetConfig +from datasets_based.config import AppConfig, FirstRowsConfig @@ -58 +58 @@ def app_config(set_env_vars: MonkeyPatch) -> Iterator[AppConfig]: - app_config = AppConfig() + app_config = AppConfig.from_env() @@ -70,7 +70,2 @@ def app_config(set_env_vars: MonkeyPatch) -> Iterator[AppConfig]: -def first_rows_config() -> FirstRowsConfig: - return FirstRowsConfig() - - -@fixture -def parquet_config() -> ParquetConfig: - return ParquetConfig() +def first_rows_config(set_env_vars: MonkeyPatch) -> FirstRowsConfig: + return FirstRowsConfig.from_env() diff --git a/workers/datasets_based/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py deleted file mode 100644 index db013c1d..00000000 --- a/workers/datasets_based/tests/test_worker.py +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from typing import Optional - -import pytest - -from datasets_based.config import AppConfig -from datasets_based.worker import get_worker - - [email protected]( - "endpoint,expected_worker", - [ - (None, "SplitsWorker"), - ("/splits", "SplitsWorker"), - ("/first-rows", "SplitsWorker"), - ("/unknown", None), - ], -) -def test_get_worker(app_config: AppConfig, endpoint: Optional[str], expected_worker: Optional[str]) -> None: - if endpoint is not None: - app_config.datasets_based.endpoint = endpoint - if expected_worker is None: - with pytest.raises(ValueError): - get_worker(app_config) - else: - worker = get_worker(app_config) - worker.__class__.__name__ == expected_worker diff --git a/workers/datasets_based/tests/test_worker_factory.py b/workers/datasets_based/tests/test_worker_factory.py new file mode 100644 index 00000000..d662bd4d --- /dev/null +++ b/workers/datasets_based/tests/test_worker_factory.py @@ -0,0 +1,37 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
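+# This test checks that DatasetBasedWorkerFactory.create_worker returns the
+# worker class that matches the job type, and raises a ValueError for an
+# unsupported job type.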
+ +from typing import Optional + +import pytest +from libcommon.worker import JobInfo + +from datasets_based.config import AppConfig +from datasets_based.worker_factory import DatasetBasedWorkerFactory + + [email protected]( + "job_type,expected_worker", + [ + ("/splits", "SplitsWorker"), + ("/first-rows", "FirstRowsWorker"), + ("/parquet", "ParquetWorker"), + ("/unknown", None), + ], +) +def test_create_worker(app_config: AppConfig, job_type: str, expected_worker: Optional[str]) -> None: + worker_factory = DatasetBasedWorkerFactory(app_config=app_config) + job_info: JobInfo = { + "type": job_type, + "dataset": "dataset", + "config": "config", + "split": "split", + "job_id": "job_id", + "force": False, + } + if expected_worker is None: + with pytest.raises(ValueError): + worker_factory.create_worker(job_info=job_info) + else: + worker = worker_factory.create_worker(job_info=job_info) + assert worker.__class__.__name__ == expected_worker diff --git a/workers/datasets_based/tests/workers/test__datasets_based_worker.py b/workers/datasets_based/tests/workers/test__datasets_based_worker.py index 5cec7039..c9b946b0 100644 --- a/workers/datasets_based/tests/workers/test__datasets_based_worker.py +++ b/workers/datasets_based/tests/workers/test__datasets_based_worker.py @@ -4 +3,0 @@ -import os @@ -14,0 +14,2 @@ from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker +from ..fixtures.hub import get_default_config_split + @@ -18 +19 @@ class DummyWorker(DatasetsBasedWorker): - def get_endpoint() -> str: + def get_job_type() -> str: @@ -23,4 +24,6 @@ class DummyWorker(DatasetsBasedWorker): - def compute( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> Mapping[str, Any]: - if config == "raise": + @staticmethod + def get_version() -> str: + return "1.0.0" + + def compute(self) -> Mapping[str, Any]: + if self.config == "raise": @@ -32,7 +35,24 @@ class DummyWorker(DatasetsBasedWorker): [email protected] -def worker(app_config: AppConfig) -> DummyWorker: - return DummyWorker(app_config=app_config) - - -def test_version(worker: DummyWorker) -> None: - assert len(worker.version.split(".")) == 3 +def get_worker( + dataset: str, + config: Optional[str], + split: Optional[str], + app_config: AppConfig, + force: bool = False, +) -> DummyWorker: + return DummyWorker( + job_info={ + "type": DummyWorker.get_job_type(), + "dataset": dataset, + "config": config, + "split": split, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + ) + + +def test_version(app_config: AppConfig) -> None: + dataset, config, split = get_default_config_split("dataset") + worker = get_worker(dataset, config, split, app_config) + assert len(worker.get_version().split(".")) == 3 @@ -43,9 +62,0 @@ def test_version(worker: DummyWorker) -> None: -def test_has_storage(worker: DummyWorker) -> None: - assert worker.has_storage() is True - worker.datasets_based_config.max_disk_usage_percent = 0 - # the directory does not exist yet, so it should return True - assert worker.has_storage() is True - os.makedirs(worker.datasets_based_config.hf_datasets_cache, exist_ok=True) - assert worker.has_storage() is False - - @@ -73 +84 @@ def test_get_cache_subdirectory( - worker: DummyWorker, dataset: str, config: Optional[str], split: Optional[str], force: bool, expected: str + app_config: AppConfig, dataset: str, config: Optional[str], split: Optional[str], force: bool, expected: str @@ -76,2 +87,2 @@ def test_get_cache_subdirectory( - subdirectory = 
worker.get_cache_subdirectory(date=date, dataset=dataset, config=config, split=split, force=force) - assert subdirectory == expected + worker = get_worker(dataset, config, split, app_config, force=force) + assert worker.get_cache_subdirectory(date=date) == expected @@ -80 +91,3 @@ def test_get_cache_subdirectory( -def test_set_and_unset_datasets_cache(worker: DummyWorker) -> None: +def test_set_and_unset_datasets_cache(app_config: AppConfig) -> None: + dataset, config, split = get_default_config_split("dataset") + worker = get_worker(dataset, config, split, app_config) @@ -89 +102,3 @@ def test_set_and_unset_datasets_cache(worker: DummyWorker) -> None: -def test_set_and_unset_cache(worker: DummyWorker) -> None: +def test_set_and_unset_cache(app_config: AppConfig) -> None: + dataset, config, split = get_default_config_split("user/dataset") + worker = get_worker(dataset, config, split, app_config) @@ -91 +106 @@ def test_set_and_unset_cache(worker: DummyWorker) -> None: - worker.set_cache(dataset="user/dataset", config="config", split="split", force=True) + worker.set_cache() @@ -99 +114 @@ def test_set_and_unset_cache(worker: DummyWorker) -> None: -def test_process(worker: DummyWorker, hub_public_csv: str, config: str) -> None: +def test_process(app_config: AppConfig, hub_public_csv: str, config: str) -> None: @@ -101,0 +117,3 @@ def test_process(worker: DummyWorker, hub_public_csv: str, config: str) -> None: + dataset = hub_public_csv + split = "split" + worker = get_worker(dataset, config, split, app_config) @@ -105 +123 @@ def test_process(worker: DummyWorker, hub_public_csv: str, config: str) -> None: - result = worker.process(dataset=hub_public_csv, config=config, force=True) + result = worker.process() diff --git a/workers/datasets_based/tests/workers/test_first_rows.py b/workers/datasets_based/tests/workers/test_first_rows.py index ddd059d8..60fc1f39 100644 --- a/workers/datasets_based/tests/workers/test_first_rows.py +++ b/workers/datasets_based/tests/workers/test_first_rows.py @@ -3,0 +4 @@ +from dataclasses import replace @@ -12,5 +13 @@ from datasets_based.config import AppConfig, FirstRowsConfig -from datasets_based.workers.first_rows import ( - FirstRowsWorker, - compute_first_rows_response, - get_json_size, -) +from datasets_based.workers.first_rows import FirstRowsWorker, get_json_size @@ -21,3 +18,20 @@ from ..fixtures.hub import HubDatasets, get_default_config_split [email protected] -def worker(app_config: AppConfig) -> FirstRowsWorker: - return FirstRowsWorker(app_config=app_config) +def get_worker( + dataset: str, + config: str, + split: str, + app_config: AppConfig, + first_rows_config: FirstRowsConfig, + force: bool = False, +) -> FirstRowsWorker: + return FirstRowsWorker( + job_info={ + "type": FirstRowsWorker.get_job_type(), + "dataset": dataset, + "config": config, + "split": split, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + first_rows_config=first_rows_config, + ) @@ -26 +40 @@ def worker(app_config: AppConfig) -> FirstRowsWorker: -def should_skip_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: +def should_skip_job(app_config: AppConfig, first_rows_config: FirstRowsConfig, hub_public_csv: str) -> None: @@ -28 +42,2 @@ def should_skip_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: - assert worker.should_skip_job(dataset=dataset, config=config, split=split) is False + worker = get_worker(dataset, config, split, app_config, first_rows_config) + assert worker.should_skip_job() is False @@ -30,3 +45,4 @@ def 
should_skip_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: - worker.process(dataset=dataset, config=config, split=split) - assert worker.should_skip_job(dataset=dataset, config=config, split=split) is True - assert worker.should_skip_job(dataset=dataset, config=config, split=split, force=False) is False + worker.process() + assert worker.should_skip_job() is True + worker = get_worker(dataset, config, split, app_config, first_rows_config, force=True) + assert worker.should_skip_job() is False @@ -35 +51 @@ def should_skip_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: -def test_compute(worker: FirstRowsWorker, hub_public_csv: str) -> None: +def test_compute(app_config: AppConfig, first_rows_config: FirstRowsConfig, hub_public_csv: str) -> None: @@ -37 +53,2 @@ def test_compute(worker: FirstRowsWorker, hub_public_csv: str) -> None: - assert worker.process(dataset=dataset, config=config, split=split) is True + worker = get_worker(dataset, config, split, app_config, first_rows_config) + assert worker.process() is True @@ -41 +58 @@ def test_compute(worker: FirstRowsWorker, hub_public_csv: str) -> None: - assert cached_response["worker_version"] == worker.version + assert cached_response["worker_version"] == worker.get_version() @@ -52 +69 @@ def test_compute(worker: FirstRowsWorker, hub_public_csv: str) -> None: -def test_doesnotexist(worker: FirstRowsWorker) -> None: +def test_doesnotexist(app_config: AppConfig, first_rows_config: FirstRowsConfig) -> None: @@ -55 +72,2 @@ def test_doesnotexist(worker: FirstRowsWorker) -> None: - assert worker.process(dataset=dataset, config=config, split=split) is False + worker = get_worker(dataset, config, split, app_config, first_rows_config) + assert worker.process() is False @@ -60,7 +77,0 @@ def test_doesnotexist(worker: FirstRowsWorker) -> None: -def test_process_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: - dataset, config, split = get_default_config_split(hub_public_csv) - worker.queue.add_job(dataset=dataset, config=config, split=split) - result = worker.process_next_job() - assert result is True - - @@ -102,0 +114,7 @@ def test_number_rows( + worker = get_worker( + dataset, + config, + split, + app_config if use_token else replace(app_config, common=replace(app_config.common, hf_token=None)), + first_rows_config, + ) @@ -104,13 +122 @@ def test_number_rows( - result = compute_first_rows_response( - dataset=dataset, - config=config, - split=split, - assets_base_url=first_rows_config.assets.base_url, - assets_directory=first_rows_config.assets.storage_directory, - hf_token=app_config.common.hf_token if use_token else None, - max_size_fallback=first_rows_config.fallback_max_dataset_size, - rows_max_number=first_rows_config.max_number, - rows_min_number=first_rows_config.min_number, - rows_max_bytes=first_rows_config.max_bytes, - min_cell_bytes=first_rows_config.min_cell_bytes, - ) + result = worker.compute() @@ -120,13 +126 @@ def test_number_rows( - compute_first_rows_response( - dataset=dataset, - config=config, - split=split, - assets_base_url=first_rows_config.assets.base_url, - assets_directory=first_rows_config.assets.storage_directory, - hf_token=app_config.common.hf_token if use_token else None, - max_size_fallback=first_rows_config.fallback_max_dataset_size, - rows_max_number=first_rows_config.max_number, - rows_min_number=first_rows_config.min_number, - rows_max_bytes=first_rows_config.max_bytes, - min_cell_bytes=first_rows_config.min_cell_bytes, - ) + worker.compute() @@ -170,12 +164,12 @@ def 
test_truncation( - response = compute_first_rows_response( - dataset=dataset, - config=config, - split=split, - assets_base_url=first_rows_config.assets.base_url, - assets_directory=first_rows_config.assets.storage_directory, - hf_token=None, - max_size_fallback=first_rows_config.fallback_max_dataset_size, - rows_max_number=1_000_000, - rows_min_number=10, - rows_max_bytes=rows_max_bytes, - min_cell_bytes=10, + worker = get_worker( + dataset, + config, + split, + app_config=replace(app_config, common=replace(app_config.common, hf_token=None)), + first_rows_config=replace( + first_rows_config, + max_number=1_000_000, + min_number=10, + max_bytes=rows_max_bytes, + min_cell_bytes=10, + ), @@ -182,0 +177 @@ def test_truncation( + response = worker.compute() diff --git a/workers/datasets_based/tests/workers/test_parquet.py b/workers/datasets_based/tests/workers/test_parquet.py index 450ad376..23d420cb 100644 --- a/workers/datasets_based/tests/workers/test_parquet.py +++ b/workers/datasets_based/tests/workers/test_parquet.py @@ -20 +19,0 @@ from datasets_based.workers.parquet import ( - compute_parquet_response, @@ -49,2 +48,2 @@ def set_supported_datasets(hub_datasets: HubDatasets) -> Iterator[pytest.MonkeyP -def worker(app_config: AppConfig) -> ParquetWorker: - return ParquetWorker(app_config=app_config) +def parquet_config(set_env_vars: pytest.MonkeyPatch, set_supported_datasets: pytest.MonkeyPatch) -> ParquetConfig: + return ParquetConfig.from_env() @@ -53 +52,21 @@ def worker(app_config: AppConfig) -> ParquetWorker: -def test_compute(worker: ParquetWorker, hub_datasets: HubDatasets) -> None: +def get_worker( + dataset: str, + app_config: AppConfig, + parquet_config: ParquetConfig, + force: bool = False, +) -> ParquetWorker: + return ParquetWorker( + job_info={ + "type": ParquetWorker.get_job_type(), + "dataset": dataset, + "config": None, + "split": None, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + parquet_config=parquet_config, + ) + + +def test_compute(app_config: AppConfig, parquet_config: ParquetConfig, hub_datasets: HubDatasets) -> None: @@ -55 +74,2 @@ def test_compute(worker: ParquetWorker, hub_datasets: HubDatasets) -> None: - assert worker.process(dataset=dataset) is True + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) + assert worker.process() is True @@ -59 +79 @@ def test_compute(worker: ParquetWorker, hub_datasets: HubDatasets) -> None: - assert cached_response["worker_version"] == worker.version + assert cached_response["worker_version"] == worker.get_version() @@ -66 +86 @@ def test_compute(worker: ParquetWorker, hub_datasets: HubDatasets) -> None: -def test_doesnotexist(worker: ParquetWorker) -> None: +def test_doesnotexist(app_config: AppConfig, parquet_config: ParquetConfig) -> None: @@ -68 +88,2 @@ def test_doesnotexist(worker: ParquetWorker) -> None: - assert worker.process(dataset=dataset) is False + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) + assert worker.process() is False @@ -173 +194 @@ def test_raise_if_not_supported( -def test_not_supported_if_big(worker: ParquetWorker, hub_public_big: str) -> None: +def test_not_supported_if_big(app_config: AppConfig, parquet_config: ParquetConfig, hub_public_big: str) -> None: @@ -175,2 +196,4 @@ def test_not_supported_if_big(worker: ParquetWorker, hub_public_big: str) -> Non - assert worker.process(dataset=hub_public_big) is False - cached_response = get_response(kind=worker.processing_step.cache_kind, 
dataset=hub_public_big) + dataset = hub_public_big + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) + assert worker.process() is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) @@ -181 +204 @@ def test_not_supported_if_big(worker: ParquetWorker, hub_public_big: str) -> Non -def test_supported_if_gated(worker: ParquetWorker, hub_gated_csv: str) -> None: +def test_supported_if_gated(app_config: AppConfig, parquet_config: ParquetConfig, hub_gated_csv: str) -> None: @@ -183,2 +206,4 @@ def test_supported_if_gated(worker: ParquetWorker, hub_gated_csv: str) -> None: - assert worker.process(dataset=hub_gated_csv) is True - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_gated_csv) + dataset = hub_gated_csv + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) + assert worker.process() is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) @@ -189,2 +214,3 @@ def test_supported_if_gated(worker: ParquetWorker, hub_gated_csv: str) -> None: [email protected] -def test_not_supported_if_gated_with_extra_fields(worker: ParquetWorker, hub_gated_extra_fields_csv: str) -> None: +def test_not_supported_if_gated_with_extra_fields( + app_config: AppConfig, parquet_config: ParquetConfig, hub_gated_extra_fields_csv: str +) -> None: @@ -192,2 +218,4 @@ def test_not_supported_if_gated_with_extra_fields(worker: ParquetWorker, hub_gat - assert worker.process(dataset=hub_gated_extra_fields_csv) is False - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_gated_extra_fields_csv) + dataset = hub_gated_extra_fields_csv + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) + assert worker.process() is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) @@ -198,2 +226 @@ def test_not_supported_if_gated_with_extra_fields(worker: ParquetWorker, hub_gat [email protected] -def test_blocked(worker: ParquetWorker, hub_public_jsonl: str) -> None: +def test_blocked(app_config: AppConfig, parquet_config: ParquetConfig, hub_public_jsonl: str) -> None: @@ -201,2 +228,4 @@ def test_blocked(worker: ParquetWorker, hub_public_jsonl: str) -> None: - assert worker.process(dataset=hub_public_jsonl) is False - cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_public_jsonl) + dataset = hub_public_jsonl + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) + assert worker.process() is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) @@ -207,8 +235,0 @@ def test_blocked(worker: ParquetWorker, hub_public_jsonl: str) -> None: [email protected] -def test_process_job(worker: ParquetWorker, hub_public_csv: str) -> None: - worker.queue.add_job(dataset=hub_public_csv) - result = worker.process_next_job() - assert result is True - - [email protected] @@ -224,13 +245,2 @@ def test_compute_splits_response_simple_csv_ok( - result = compute_parquet_response( - dataset=dataset, - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - committer_hf_token=parquet_config.committer_hf_token, - source_revision=parquet_config.source_revision, - target_revision=parquet_config.target_revision, - commit_message=parquet_config.commit_message, - url_template=parquet_config.url_template, - 
supported_datasets=parquet_config.supported_datasets, - blocked_datasets=parquet_config.blocked_datasets, - max_dataset_size=parquet_config.max_dataset_size, - ) + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) + result = worker.compute() @@ -257 +266,0 @@ def test_compute_splits_response_simple_csv_ok( [email protected] @@ -275,0 +285 @@ def test_compute_splits_response_simple_csv_error( + worker = get_worker(dataset=dataset, app_config=app_config, parquet_config=parquet_config) @@ -277,13 +287 @@ def test_compute_splits_response_simple_csv_error( - compute_parquet_response( - dataset=dataset, - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - committer_hf_token=parquet_config.committer_hf_token, - source_revision=parquet_config.source_revision, - target_revision=parquet_config.target_revision, - commit_message=parquet_config.commit_message, - url_template=parquet_config.url_template, - supported_datasets=parquet_config.supported_datasets, - blocked_datasets=parquet_config.blocked_datasets, - max_dataset_size=parquet_config.max_dataset_size, - ) + worker.compute() diff --git a/workers/datasets_based/tests/workers/test_splits.py b/workers/datasets_based/tests/workers/test_splits.py index 8ec8252a..919c6db4 100644 --- a/workers/datasets_based/tests/workers/test_splits.py +++ b/workers/datasets_based/tests/workers/test_splits.py @@ -3,0 +4 @@ +from dataclasses import replace @@ -11 +12 @@ from datasets_based.config import AppConfig -from datasets_based.workers.splits import SplitsWorker, compute_splits_response +from datasets_based.workers.splits import SplitsWorker @@ -16,6 +17,19 @@ from ..fixtures.hub import HubDatasets [email protected] -def worker(app_config: AppConfig) -> SplitsWorker: - return SplitsWorker(app_config=app_config) - - -def should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: +def get_worker( + dataset: str, + app_config: AppConfig, + force: bool = False, +) -> SplitsWorker: + return SplitsWorker( + job_info={ + "type": SplitsWorker.get_job_type(), + "dataset": dataset, + "config": None, + "split": None, + "job_id": "job_id", + "force": force, + }, + app_config=app_config, + ) + + +def should_skip_job(app_config: AppConfig, hub_public_csv: str) -> None: @@ -23 +37,2 @@ def should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: - assert worker.should_skip_job(dataset=dataset) is False + worker = get_worker(dataset, app_config) + assert worker.should_skip_job() is False @@ -25,3 +40,4 @@ def should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: - worker.process(dataset=dataset) - assert worker.should_skip_job(dataset=dataset) is True - assert worker.should_skip_job(dataset=dataset, force=True) is False + worker.process() + assert worker.should_skip_job() is True + worker = get_worker(dataset, app_config, force=True) + assert worker.should_skip_job() is False @@ -30 +46 @@ def should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: -def test_process(worker: SplitsWorker, hub_public_csv: str) -> None: +def test_process(app_config: AppConfig, hub_public_csv: str) -> None: @@ -32 +48,2 @@ def test_process(worker: SplitsWorker, hub_public_csv: str) -> None: - assert worker.process(dataset=dataset) is True + worker = get_worker(dataset, app_config) + assert worker.process() is True @@ -36 +53 @@ def test_process(worker: SplitsWorker, hub_public_csv: str) -> None: - assert cached_response["worker_version"] == worker.version + assert 
cached_response["worker_version"] == worker.get_version() @@ -45 +62 @@ def test_process(worker: SplitsWorker, hub_public_csv: str) -> None: -def test_doesnotexist(worker: SplitsWorker) -> None: +def test_doesnotexist(app_config: AppConfig) -> None: @@ -47 +64,2 @@ def test_doesnotexist(worker: SplitsWorker) -> None: - assert worker.process(dataset=dataset) is False + worker = get_worker(dataset, app_config) + assert worker.process() is False @@ -52,6 +69,0 @@ def test_doesnotexist(worker: SplitsWorker) -> None: -def test_process_job(worker: SplitsWorker, hub_public_csv: str) -> None: - worker.queue.add_job(dataset=hub_public_csv) - result = worker.process_next_job() - assert result is True - - @@ -77,0 +90,4 @@ def test_compute_splits_response_simple_csv( + worker = get_worker( + dataset, + app_config if use_token else replace(app_config, common=replace(app_config.common, hf_token=None)), + ) @@ -79,4 +95 @@ def test_compute_splits_response_simple_csv( - result = compute_splits_response( - dataset=dataset, - hf_token=app_config.common.hf_token if use_token else None, - ) + result = worker.compute() @@ -87,4 +100 @@ def test_compute_splits_response_simple_csv( - compute_splits_response( - dataset=dataset, - hf_token=app_config.common.hf_token if use_token else None, - ) + worker.compute()
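Across all four test modules, the refactor recorded in this diff converges on one construction pattern: job parameters no longer get passed to `process()`/`compute()` but travel in a single `JobInfo` mapping handed to the worker at construction time. A minimal sketch of that pattern, assuming a configured `app_config` (Mongo and Hub access) such as the fixture used in these tests:

```python
# Sketch only: `app_config` is assumed to be a working AppConfig, as provided
# by the test fixtures above.
from libcommon.worker import JobInfo

from datasets_based.workers.splits import SplitsWorker

job_info: JobInfo = {
    "type": SplitsWorker.get_job_type(),  # "/splits"
    "dataset": "user/dataset",
    "config": None,
    "split": None,
    "job_id": "job_id",
    "force": False,  # True bypasses the cache check in should_skip_job()
}
worker = SplitsWorker(job_info=job_info, app_config=app_config)
if not worker.should_skip_job():
    worker.process()  # compute() reads self.dataset / self.config / self.split
```

The per-module `get_worker` helpers introduced above are thin wrappers around exactly this constructor call.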
746414449cae4b311733f8a76e5b3b4ca73b38a9
Sylvain Lesage
2022-12-20T22:10:58
feat: 🎸 give each worker its own version + upgrade to 2.0.0 (#667)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 47d036e1..d2967925 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-3223a24" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-e36bd91" diff --git a/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py index 403d3e81..b8f63ea9 100644 --- a/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py +++ b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py @@ -4 +3,0 @@ -import importlib.metadata @@ -37 +36 @@ class DatasetsBasedWorker(Worker, ABC): - def __init__(self, app_config: AppConfig): + def __init__(self, app_config: AppConfig, version: str = "1.0.0"): @@ -44 +43 @@ class DatasetsBasedWorker(Worker, ABC): - version=importlib.metadata.version(__package__.split(".")[0]), + version=version, diff --git a/workers/datasets_based/src/datasets_based/workers/first_rows.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py index d595a32e..241bd22f 100644 --- a/workers/datasets_based/src/datasets_based/workers/first_rows.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -549,0 +550,3 @@ def compute_first_rows_response( +FIRST_ROWS_VERSION = "2.0.0" + + @@ -558 +561 @@ class FirstRowsWorker(DatasetsBasedWorker): - super().__init__(app_config=app_config) + super().__init__(version=FIRST_ROWS_VERSION, app_config=app_config) diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index b444e379..82efa4b8 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -568,0 +569,3 @@ def compute_parquet_response( +PARQUET_VERSION = "2.0.0" + + @@ -577 +580 @@ class ParquetWorker(DatasetsBasedWorker): - super().__init__(app_config=app_config) + super().__init__(version=PARQUET_VERSION, app_config=app_config) diff --git a/workers/datasets_based/src/datasets_based/workers/splits.py b/workers/datasets_based/src/datasets_based/workers/splits.py index 31e1c268..55f74249 100644 --- a/workers/datasets_based/src/datasets_based/workers/splits.py +++ b/workers/datasets_based/src/datasets_based/workers/splits.py @@ -18,0 +19 @@ from libcommon.worker import Queue +from datasets_based.config import AppConfig @@ -163,0 +165,3 @@ def compute_splits_response( +SPLITS_VERSION = "2.0.0" + + @@ -168,0 +173,3 @@ class SplitsWorker(DatasetsBasedWorker): + def __init__(self, app_config: AppConfig): + super().__init__(version=SPLITS_VERSION, app_config=app_config) +
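The mechanics of the change are easiest to see as a worker skeleton: the base class now accepts a `version` argument, and each concrete worker pins its own constant instead of all workers sharing the package version from `importlib.metadata`. A sketch under the pre-refactor method names (`get_endpoint`, `compute` with explicit arguments), using a hypothetical worker for illustration:

```python
from typing import Any, Mapping, Optional

from datasets_based.config import AppConfig
from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker

MY_WORKER_VERSION = "2.0.0"  # bumped independently of the other workers


class MyWorker(DatasetsBasedWorker):  # hypothetical worker, for illustration only
    @staticmethod
    def get_endpoint() -> str:
        return "/my-endpoint"

    def __init__(self, app_config: AppConfig):
        # hand the per-worker version down to the Worker base class
        super().__init__(version=MY_WORKER_VERSION, app_config=app_config)

    def compute(
        self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False
    ) -> Mapping[str, Any]:
        return {}
```

Each cached response stores this value as `worker_version`, as asserted in the tests above, so a worker's version can now be bumped without touching the others.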
b1d71bd1a5fe60059e05a84bba67837d64d8db1a
Sylvain Lesage
2022-12-20T21:13:34
feat: 🎸 upgrade datasets to 2.8.0 (#666)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 6ad554a3..47d036e1 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-786c9b2" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-3223a24" diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 300f2e6a..45b612b1 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -200 +200 @@ firstRows: - replicas: 20 + replicas: 16 @@ -215 +215 @@ parquet: - maxDatasetSize: "500_000_000" # support up to 500 MiB + maxDatasetSize: "5_000_000_000" # support up to 5 GB @@ -222 +222 @@ parquet: - replicas: 10 + replicas: 16 diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index 14587a63..21c51fef 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -366 +366 @@ name = "datasets" -version = "2.7.1" +version = "2.8.0" @@ -394 +394 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (>=2.0.1)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "black (>=22.0,<23.0)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "fsspec[s3]", "isort (>=5.0.0)", "jiwer", "langdetect", "librosa", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "nltk", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "requests-file (>=1.5.1)", "rouge-score", "s3fs (>=2021.11.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "spacy (>=3.0.0)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio (<0.12.0)", "transformers", "typer (<0.5.0)", "zstandard"] +dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "black (>=22.0,<23.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] @@ -395,0 +396 @@ docs = ["s3fs"] +metrics-tests = ["Werkzeug (>=1.0.1)", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "sqlalchemy", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] @@ -397 +398 @@ quality = ["black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyam -s3 = ["boto3", "botocore", "fsspec", "s3fs"] +s3 = ["s3fs"] @@ -400 +401 @@ tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (>=2.0.1)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "fsspec[s3]", "jiwer", "langdetect", "librosa", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "nltk", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "requests-file 
(>=1.5.1)", "rouge-score", "s3fs (>=2021.11.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "spacy (>=3.0.0)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio (<0.12.0)", "transformers", "typer (<0.5.0)", "zstandard"] +tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] @@ -2459 +2460 @@ python-versions = "3.9.15" -content-hash = "412449e8599d28918b44d286a04565b94cd5bc5d6a49bce47fb447e6ae902f14" +content-hash = "61028ae835ec768d358c2ba4cf82ac6122e438b9a5fe4ff4e755483dcb178eba" @@ -2989,2 +2990,2 @@ datasets = [ - {file = "datasets-2.7.1-py3-none-any.whl", hash = "sha256:3d0d2e860cec7c4e77c40de64533d46853f939b6e2311cba4f483f000afae868"}, - {file = "datasets-2.7.1.tar.gz", hash = "sha256:1c79a982d9d9c75fbbaea5b177c2b4c56894289b647fa2845ae2ebd8ac638a0f"}, + {file = "datasets-2.8.0-py3-none-any.whl", hash = "sha256:f36cb362bb5587659bab18e594b6d25d9d28486d735a571319c82efeb5a4e5df"}, + {file = "datasets-2.8.0.tar.gz", hash = "sha256:a843b69593914071f921fc1086fde939f30a63415a34cdda5db3c0acdd58aff2"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index 837b68b7..5ffd565b 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "~2.7.1" } +datasets = { extras = ["audio", "vision"], version = "~2.8.0" } diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index 07582743..b444e379 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -14,5 +14 @@ import datasets.config -from datasets import ( - get_dataset_config_info, - get_dataset_config_names, - load_dataset_builder, -) +from datasets import get_dataset_config_names, get_dataset_infos, load_dataset_builder @@ -130,29 +125,0 @@ class ParquetFile: -# until https://github.com/huggingface/datasets/pull/5333 is merged -def get_dataset_infos(path: str, revision: Optional[str] = None, use_auth_token: Optional[str] = None): - """Get the meta information about a dataset, returned as a dict mapping config name to DatasetInfoDict. - - Args: - path (``str``): a dataset identifier on the Hugging Face Hub (list all available datasets and ids with - ``datasets.list_datasets()``) e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'`` - revision (Optional ``str``): - If specified, the dataset module will be loaded from the datasets repository at this version. - By default: - - it is set to the local version of the lib. - - it will also try to load it from the main branch if it's not available at the local version of the lib. - Specifying a version that is different from your local version of the lib might cause compatibility issues. - use_auth_token (``str``, optional): Optional string to use as Bearer token for remote files on the Datasets - Hub. 
- """ - config_names = get_dataset_config_names( - path=path, - revision=revision, - use_auth_token=use_auth_token, - ) - return { - config_name: get_dataset_config_info( - path=path, config_name=config_name, revision=revision, use_auth_token=use_auth_token - ) - for config_name in config_names - } - - @@ -538,3 +505 @@ def compute_parquet_response( - builder.download_and_prepare( - file_format="parquet", use_auth_token=hf_token - ) # the parquet files are stored in the cache dir + builder.download_and_prepare(file_format="parquet") # the parquet files are stored in the cache dir
598e9174814d52f729087b58862511990f33cf97
Sylvain Lesage
2022-12-20T20:41:38
Fix empty commits (#665)
diff --git a/.github/workflows/_quality-python.yml b/.github/workflows/_quality-python.yml index 1deb065c..dfd331f3 100644 --- a/.github/workflows/_quality-python.yml +++ b/.github/workflows/_quality-python.yml @@ -57 +57 @@ jobs: - run: bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" + run: bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" @@ -60 +60 @@ jobs: - run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" + run: bash -c "poetry run pip-audit --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" diff --git a/tools/PythonAudit.mk b/tools/PythonAudit.mk index 831c9274..e4276d6d 100644 --- a/tools/PythonAudit.mk +++ b/tools/PythonAudit.mk @@ -3 +3 @@ pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" + bash -c "poetry run pip-audit --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" diff --git a/workers/datasets_based/Makefile b/workers/datasets_based/Makefile index 482f2396..c5dda24f 100644 --- a/workers/datasets_based/Makefile +++ b/workers/datasets_based/Makefile @@ -19 +19 @@ pip-audit: - bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" + bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v --ignore-vuln GHSA-hcpj-qp55-gfph -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py index 3cd1a49d..07582743 100644 --- a/workers/datasets_based/src/datasets_based/workers/parquet.py +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -524,13 +523,0 @@ def compute_parquet_response( - # create the target revision if it does not exist yet - try: - target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) - except RepositoryNotFoundError as err: - raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err - except RevisionNotFoundError: - # create the parquet_ref (refs/convert/parquet) - committer_hf_api.create_branch(repo_id=dataset, branch=target_revision, repo_type=DATASET_TYPE) - target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) - - target_sha = target_dataset_info.sha - previous_files = [f.rfilename for f in target_dataset_info.siblings] - @@ -559 +546,15 @@ def compute_parquet_response( - # send the files to the target revision + # create the target revision if it does not exist yet + try: + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) + except RepositoryNotFoundError as err: + raise DatasetNotFoundError("The dataset does 
not exist on the Hub.") from err + except RevisionNotFoundError: + # create the parquet_ref (refs/convert/parquet) + committer_hf_api.create_branch(repo_id=dataset, branch=target_revision, repo_type=DATASET_TYPE) + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) + + # delete: + # - the previous files, + previous_files = {f.rfilename for f in target_dataset_info.siblings} + # except: + # - the files we will update, @@ -561,2 +562,2 @@ def compute_parquet_response( - # don't delete the files we will update - files_to_delete = [file for file in previous_files if file not in files_to_add] + # - .gitattributes if present. + files_to_delete = previous_files - set(files_to_add.keys()).union({".gitattributes"}) @@ -563,0 +565,3 @@ def compute_parquet_response( + logging.debug(f"delete_operations={delete_operations}") + + # send the files to the target revision @@ -567,0 +572,2 @@ def compute_parquet_response( + logging.debug(f"add_operations={add_operations}") + @@ -574 +580 @@ def compute_parquet_response( - parent_commit=target_sha, + parent_commit=target_dataset_info.sha,
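The empty-commit fix has two parts: the target revision is now inspected right before committing, so `previous_files` is fresh, and the delete list is derived with set arithmetic that spares both the files about to be re-uploaded and `.gitattributes`. That arithmetic, isolated with illustrative file names:

```python
# Illustrative names only; in parquet.py these come from the Hub revision
# (previous_files) and from the prepared parquet shards (files_to_add).
previous_files = {"config/train.parquet", "config/stale.parquet", ".gitattributes"}
files_to_add = {"config/train.parquet": "/cache/train.parquet"}  # filename -> local path

files_to_delete = previous_files - set(files_to_add.keys()).union({".gitattributes"})
assert files_to_delete == {"config/stale.parquet"}
```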
2d3cfff4b6baf4e3ea5d92658d507dd5e59d63ea
Sylvain Lesage
2022-12-14T14:58:49
docs: ✏️ fix doc (#664)
diff --git a/docs/source/server.mdx b/docs/source/server.mdx index fa81222e..8a1d8a9c 100644 --- a/docs/source/server.mdx +++ b/docs/source/server.mdx @@ -3 +3 @@ -The Datasets Server has two main components that work together to return queries about a dataset instantly: +The [Datasets Server](https://github.com/huggingface/datasets-server) has two main components that work together to return queries about a dataset instantly: @@ -20 +20 @@ There are three jobs: -- `/parquet` corresponds to the `/parquet` endpoint. It downloads the whole dataset, converts it to parquet](https://parquet.apache.org/) and publishes the parquet files to the Hub. +- `/parquet` corresponds to the `/parquet` endpoint. It downloads the whole dataset, converts it to [parquet](https://parquet.apache.org/) and publishes the parquet files to the Hub. @@ -28 +28 @@ Workers are responsible for executing the jobs in the queue. They complete the a -Take a look at the [workers configuration](https://github.com/huggingface/datasets-server/tree/main/workers/first_rows#configuration) for a complete list of the environment variables if you're interested in learning more. +Take a look at the [workers configuration](https://github.com/huggingface/datasets-server/tree/main/workers/datasets_based#configuration) for a complete list of the environment variables if you're interested in learning more.
bd6aae32aa06064d6888d8a1bf8246153e3f9780
Sylvain Lesage
2022-12-13T10:11:54
feat: 🎸 add method to get the duration of the jobs per dataset (#663)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 6a535e57..6ad554a3 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -8 +8 @@ - "admin": "huggingface/datasets-server-services-admin:sha-7b4762b", + "admin": "huggingface/datasets-server-services-admin:sha-adde39b", diff --git a/libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl new file mode 100644 index 00000000..81bba9af Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.11.tar.gz b/libs/libcommon/dist/libcommon-0.5.11.tar.gz new file mode 100644 index 00000000..32af840e Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.11.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index d00388f9..47f4c352 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.5.10" +version = "0.5.11" diff --git a/libs/libcommon/src/libcommon/queue.py b/libs/libcommon/src/libcommon/queue.py index c4ea39be..6173edee 100644 --- a/libs/libcommon/src/libcommon/queue.py +++ b/libs/libcommon/src/libcommon/queue.py @@ -8 +8 @@ from collections import Counter -from datetime import datetime, timezone +from datetime import datetime, timedelta, timezone @@ -11 +11 @@ from operator import itemgetter -from typing import Generic, List, Literal, Optional, Type, TypedDict, TypeVar +from typing import Dict, Generic, List, Literal, Optional, Type, TypedDict, TypeVar @@ -408,0 +409,27 @@ class Queue: + def get_total_duration_per_dataset(self) -> Dict[str, int]: + """Get the total duration for the last 30 days of the finished jobs for every dataset + + Returns: a dictionary where the keys are the dataset names and the values are the total duration of its + finished jobs during the last 30 days, in seconds (integer) + """ + DURATION_IN_DAYS = 30 + return { + d["_id"]: d["total_duration"] + for d in Job.objects( + type=self.type, + status__in=[Status.SUCCESS, Status.ERROR], + finished_at__gt=datetime.now() - timedelta(days=DURATION_IN_DAYS), + ).aggregate( + { + "$group": { + "_id": "$dataset", + "total_duration": { + "$sum": { + "$dateDiff": {"startDate": "$started_at", "endDate": "$finished_at", "unit": "second"} + } + }, + } + } + ) + } + diff --git a/libs/libcommon/tests/test_queue.py b/libs/libcommon/tests/test_queue.py index d985c1fc..130f9889 100644 --- a/libs/libcommon/tests/test_queue.py +++ b/libs/libcommon/tests/test_queue.py @@ -3,0 +4 @@ +import time @@ -149,0 +151,26 @@ def test_count_by_status() -> None: + + +def test_get_total_duration_per_dataset() -> None: + test_type = "test_type" + test_dataset = "test_dataset" + test_config = "test_config" + queue = Queue(test_type) + queue.add_job(dataset=test_dataset, config=test_config, split="split1") + queue.add_job(dataset=test_dataset, config=test_config, split="split2") + queue.add_job(dataset=test_dataset, config=test_config, split="split3") + queue.add_job(dataset=test_dataset, config=test_config, split="split4") + queue.add_job(dataset=test_dataset, config=test_config, split="split5") + started_job_info = queue.start_job() + started_job_info_2 = queue.start_job() + started_job_info_3 = queue.start_job() + _ = queue.start_job() + duration = 2 + time.sleep(duration) + # finish three jobs + queue.finish_job(started_job_info["job_id"], finished_status=Status.SUCCESS) + queue.finish_job(started_job_info_2["job_id"], 
finished_status=Status.ERROR) + queue.finish_job(started_job_info_3["job_id"], finished_status=Status.SUCCESS) + # cancel one remaining job + queue.cancel_started_jobs() + # check the total duration + assert queue.get_total_duration_per_dataset() == {test_dataset: duration * 3} diff --git a/services/admin/README.md b/services/admin/README.md index 91132ff7..72a96815 100644 --- a/services/admin/README.md +++ b/services/admin/README.md @@ -45,0 +46 @@ The admin service provides endpoints: +- `/jobs-duration-per-dataset/{processing_step}`: give the sum of the jobs duration for every dataset, for all the jobs finished in the last 30 days. diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 78ffd8d1..f2b25760 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -356 +356 @@ name = "libcommon" -version = "0.5.10" +version = "0.5.11" @@ -374 +374 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl" @@ -999 +999 @@ python-versions = "3.9.15" -content-hash = "f315ae701811672df27b0efc97cc4141c2c051cc858a1836a429506b27fba778" +content-hash = "850ad555d794822b0f61abf3822eb23ce71d671e9443e61e2221dbddae691c4b" @@ -1178 +1178 @@ libcommon = [ - {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, + {file = "libcommon-0.5.11-py3-none-any.whl", hash = "sha256:de1a625049e96df154ac31750137bf5e151a6083212c2950dc05b7919beda606"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 945b52fa..edd53ef3 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.11-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 7a09c57b..c3c17e7f 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -17,0 +18 @@ from admin.routes.healthcheck import healthcheck_endpoint +from admin.routes.jobs_duration import create_jobs_duration_per_dataset_endpoint @@ -88,0 +90,12 @@ def create_app() -> Starlette: + + [ + Route( + f"/jobs-duration-per-dataset{processing_step.endpoint}", + endpoint=create_jobs_duration_per_dataset_endpoint( + processing_step=processing_step, + max_age=app_config.admin.max_age, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), + ) + for processing_step in processing_steps + ] diff --git a/services/admin/src/admin/routes/jobs_duration.py b/services/admin/src/admin/routes/jobs_duration.py new file mode 100644 index 00000000..548a685e --- /dev/null +++ b/services/admin/src/admin/routes/jobs_duration.py @@ -0,0 +1,42 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import logging +from typing import Optional + +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import Queue +from starlette.requests import Request +from starlette.responses import Response + +from admin.authentication import auth_check +from admin.utils import ( + AdminCustomError, + Endpoint, + UnexpectedError, + get_json_admin_error_response, + get_json_ok_response, +) + + +def create_jobs_duration_per_dataset_endpoint( + processing_step: ProcessingStep, + max_age: int, + external_auth_url: Optional[str] = None, + organization: Optional[str] = None, +) -> Endpoint: + async def jobs_duration_per_dataset_endpoint(request: Request) -> Response: + logging.info("/jobs-duration-per-dataset") + try: + # if auth_check fails, it will raise an exception that will be caught below + auth_check(external_auth_url=external_auth_url, request=request, organization=organization) + return get_json_ok_response( + Queue(type=processing_step.job_type).get_total_duration_per_dataset(), + max_age=max_age, + ) + except AdminCustomError as e: + return get_json_admin_error_response(e, max_age=max_age) + except Exception: + return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=max_age) + + return jobs_duration_per_dataset_endpoint
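End to end, the feature is a `Queue` aggregation (a `$group` with `$dateDiff` over finished jobs of the last 30 days, which requires MongoDB 5.0+) surfaced through one admin route per processing step. A usage sketch of the queue method itself, assuming a configured Mongo queue database and the "/splits" processing step:

```python
from libcommon.queue import Queue

# Seconds spent on SUCCESS/ERROR jobs per dataset over the last 30 days,
# e.g. {"test_dataset": 6} in the unit test above.
durations = Queue(type="/splits").get_total_duration_per_dataset()
```

The admin service returns the same mapping as JSON at `/jobs-duration-per-dataset/splits` (one route is registered per processing step in `create_app`).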
455577de653eb3b531accc29c97a85a07b67c64b
Sylvain Lesage
2022-12-12T17:37:08
feat: 🎸 update the production parameters (#662)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 1e51db46..300f2e6a 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -176 +176 @@ splits: - maxJobsPerNamespace: 4 + maxJobsPerNamespace: 1 @@ -180 +180 @@ splits: - replicas: 10 + replicas: 12 @@ -200 +200 @@ firstRows: - replicas: 18 + replicas: 20 @@ -211 +211 @@ parquet: - blockedDatasets: "matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french" + blockedDatasets: 
"matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french,Graphcore/vqa,echarlaix/vqa,jimregan/clarinpl_studio,GEM/xsum,Tevatron/wikipedia-squad-corpus,mulcyber/europarl-mono,nateraw/wit,bigscience/P3,tau/mrqa,uva-irlab/trec-cast-2019-multi-turn,vblagoje/wikipedia_snippets_streamed,Tevatron/wikipedia-wq-corpus,malteos/paperswithcode-aspects,Samip/Scotch,iluvvatar/RuREBus,nateraw/quickdraw,tau/scrolls,qanastek/MASSIVE,TalTechNLP/VoxLingua107,shanya/crd3,HugoLaurencon/libri_light,jerpint/imagenette,Leyo/TGIF,DFKI-SLT/few-nerd,crystina-z/msmarco-passage-dl20,HuggingFaceM4/epic_kitchens_100,HuggingFaceM4/yttemporal180m,andreagasparini/librispeech_train_other_only,allenai/nllb,biglam/nls_chapbook_illustrations,winvoker/lvis,Lacito/pangloss,indonesian-nlp/librivox-indonesia,Graphcore/gqa-lxmert,nanom/splittedspanish3bwc,cahya/librivox-indonesia,asapp/slue,sil-ai/audio-keyword-spotting,tner/wikiann,rogerdehe/xfund,arpelarpe/nota,mwhanna/ACT-Thor,sanchit-gandhi/librispeech_asr_clean,echarlaix/gqa-lxmert,shunk031/c
ocostuff,gigant/m-ailabs_speech_dataset_fr,jimregan/clarinpl_sejmsenat,1aurent/icdar-2011,marinone94/nst_no,jamescalam/unsplash-25k-images,stas/openwebtext-10k,florianbussmann/train_tickets-yu2020pick,benschill/brain-tumor-collection,imvladikon/paranames,PolyAI/evi,bengaliAI/cvbn,Sreyan88/librispeech_asr,superb,mozilla-foundation/common_voice_10_0,darkproger/librispeech_asr,kresnik/librispeech_asr_test,Lehrig/Monkey-Species-Collection,HuggingFaceM4/TGIF,crystina-z/miracl-bm25-negative,cats_vs_dogs,biglam/gallica_literary_fictions,common_language,competition_math,cornell_movie_dialog,evidence_infer_treatment,hebrew_projectbenyehuda,lj_speech,mc4,muchocine,opus_euconst,tab_fact,the_pile,tapaco,turkic_xwmt,web_nlg,vctk,mathaillah/BeritaHoaks-NonHoaks,universal_morphologies,LanceaKing/asvspoof2019,andreagasparini/librispeech_train_clean_only,nuprl/MultiPL-E,SLPL/naab-raw,mteb/results,SocialGrep/the-reddit-climate-change-dataset,bigscience-biomedical/anat_em,crystina-z/xor-tydi-corpus,qanastek/QUAERO,TomTBT/pmc_open_access_section,jamescalam/movielens-25m-ratings,HuggingFaceM4/charades,Tevatron/xor-tydi-corpus,khalidalt/tydiqa-primary,nvm472001/cvdataset-layoutlmv3,Lehrig/GTZAN-Collection,mteb/tatoeba-bitext-mining,sled-umich/Action-Effect,HamdiJr/Egyptian_hieroglyphs,joelito/lextreme,cooleel/xfund_de,oscar,mozilla-foundation/common_voice_7_0,KETI-AIR/vqa,Livingwithmachines/MapReader_Data_SIGSPATIAL_2022,NLPC-UOM/document_alignment_dataset-Sinhala-Tamil-English,miracl/miracl,Muennighoff/flores200,Murple/mmcrsc,mesolitica/dbp,CodedotAI/code_clippy,keshan/clean-si-mc4,yhavinga/ccmatrix,metashift,google/fleurs,HugoLaurencon/libri_light_bytes,biwi_kinect_head_pose,ami,bigscience-biomedical/ebm_pico,HuggingFaceM4/general-pmd-synthetic-testing,crystina-z/mmarco,robertmyers/pile_v2,bigbio/anat_em,biglam/early_printed_books_font_detection,nateraw/imagenet-sketch,jpwahle/dblp-discovery-dataset,andreagasparini/librispeech_test_only,crystina-z/mmarco-corpus,mozilla-foundation/common_voice_6_0,biglam/brill_iconclass,bigscience-biomedical/evidence_inference,HuggingFaceM4/cm4-synthetic-testing,SocialGrep/ten-million-reddit-answers" @@ -219 +219 @@ parquet: - maxJobsPerNamespace: 4 + maxJobsPerNamespace: 2 @@ -222 +222 @@ parquet: - replicas: 16 + replicas: 10
79a68ab5edf183cdedd747c86a28af2925c591e6
Sylvain Lesage
2022-12-09T12:57:24
feat: 🎸 add parquet worker (#651)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index acfce650..93f019c3 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -62 +62,2 @@ jobs: - COMMON_HF_TOKEN: "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" + COMMON_HF_TOKEN: "hf_datasets-server_token" + PARQUET_COMMITTER_HF_TOKEN: "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" @@ -81 +82,2 @@ jobs: - COMMON_HF_TOKEN: "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" + COMMON_HF_TOKEN: "hf_datasets-server_token" + PARQUET_COMMITTER_HF_TOKEN: "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" diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index b8ba9f36..15223062 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -60 +60 @@ The API service exposes the `/webhook` endpoint which is called by the Hub on ev -Note that two job queues exist: +Note that every worker has its own job queue: @@ -62,2 +62,3 @@ Note that two job queues exist: -- `splits`: the job is to refresh a dataset, namely to get the list of [config](https://huggingface.co/docs/datasets/v2.1.0/en/load_hub#select-a-configuration) and [split](https://huggingface.co/docs/datasets/v2.1.0/en/load_hub#select-a-split) names, then to create a new job for every split -- `first-rows`: the job is to get the columns and the first 100 rows of the split +- `/splits`: the job is to refresh a dataset, namely to get the list of [config](https://huggingface.co/docs/datasets/v2.1.0/en/load_hub#select-a-configuration) and [split](https://huggingface.co/docs/datasets/v2.1.0/en/load_hub#select-a-split) names, then to create a new job for every split for the workers that depend on it. +- `/first-rows`: the job is to get the columns and the first 100 rows of the split. +- `/parquet`: the job is to download the dataset, prepare a parquet version of every split (various sharded parquet files), and upload them to the `refs/convert/parquet` "branch" of the dataset repository on the Hub. @@ -65 +66 @@ Note that two job queues exist: -Note also that the workers create local files when the dataset contains images or audios. A shared directory (`COMMON_ASSETS_DIRECTORY`) must therefore be provisioned with sufficient space for the generated files. The `/first-rows` endpoint responses contain URLs to these files, served by the API under the `/assets/` endpoint. +Note also that the workers create local files when the dataset contains images or audios. A shared directory (`ASSETS_STORAGE_DIRECTORY`) must therefore be provisioned with sufficient space for the generated files. The `/first-rows` endpoint responses contain URLs to these files, served by the API under the `/assets/` endpoint. 
@@ -70 +71 @@ Hence, the working application has: -- M instances of the `splits` worker and N instances of the `first-rows` worker (N should generally be higher than M) +- N1 instances of the `splits` worker, N2 instances of the `first-rows` worker (N2 should generally be higher than N1), N3 instances of the `parquet` worker diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 2a34e6f8..6a535e57 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-cef5577" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-7b4762b" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-cef5577", - "api": "huggingface/datasets-server-services-api:sha-cef5577" + "admin": "huggingface/datasets-server-services-admin:sha-7b4762b", + "api": "huggingface/datasets-server-services-api:sha-7b4762b" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-cef5577" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-786c9b2" diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index b8a476f0..0e0b0499 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -13 +13 @@ secrets: - token: + appHfToken: @@ -15,0 +16,3 @@ secrets: + userHfToken: + fromSecret: false + secretName: "hf-token-francky" @@ -33,0 +37,10 @@ common: +# --- storage admin (to manually inspect the storage, in /data) --- + +storageAdmin: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 + @@ -90,0 +104,8 @@ firstRows: + +parquet: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 3877f00b..1e51db46 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -52 +52 @@ secrets: - token: + appHfToken: @@ -54,0 +55,3 @@ secrets: + userHfToken: + fromSecret: true + secretName: "hf-token-francky" @@ -77,0 +81,14 @@ mongodbMigration: +# --- storage admin (to manually inspect the storage, in /data) --- + +storageAdmin: + nodeSelector: + role-datasets-server: "true" + replicas: 1 + resources: + requests: + cpu: 1 + memory: "256Mi" + limits: + cpu: 1 + memory: "256Mi" + @@ -163 +180 @@ splits: - replicas: 12 + replicas: 10 @@ -179 +196 @@ firstRows: - maxJobsPerNamespace: 17 + maxJobsPerNamespace: 4 @@ -183 +200,23 @@ firstRows: - replicas: 30 + replicas: 18 + resources: + requests: + cpu: 1 + memory: "8Gi" + limits: + cpu: 2 + memory: "30Gi" + +parquet: + # comma-separated list of the blocked datasets. Defaults to empty. 
+ blockedDatasets: "matallanas/linustechtips-transcript-audio-wav,KnutJaegersberg/Interpretable_word_embeddings_large_cskg,ashraf-ali/quran-data,cjvt/cc_gigafida,cmudrc/porous-microstructure-strain-fields,dlwh/MultiLegalPile_Wikipedia_Shuffled,izumaru/os2-datasets,joelito/MultiLegalPile_Wikipedia_Filtered,leviethoang/VBVLSP,nyanko7/yandere-images,severo/wit,texturedesign/td01_natural-ground-textures,Tristan/olm-october-2022-tokenized-1024-exact-dedup-only,Whispering-GPT/linustechtips-transcript-audio,beyond/chinese_clean_passages_80m,bigscience/xP3,dalle-mini/YFCC100M_OpenAI_subset,galman33/gal_yair_166000_256x256_fixed,matallanas/linustechtips-transcript-audio-mp3,mwitiderrick/arXiv,sjpmpzx/qm_ly_gy_soundn,tilos/ASR-CCANTCSC,matallanas/linustechtips-transcript-audio-ogg,bigcode/the-stack,VIMA/VIMA-Data,severo/wit,wmt/europarl,chrisjay/mnist-adversarial-dataset,mwitiderrick/arXiv,HuggingFaceM4/TextCaps,CristianaLazar/librispeech5k_train,texturedesign/td01_natural-ground-textures,cjvt/cc_gigafida,Yehor/ukrainian-tts-lada,YWjimmy/PeRFception-v1,SDbiaseval/dataset-dalle,Pinguin/images,DTU54DL/librispeech5k-augmentated-train-prepared,CristianaLazar/librispeech500,abdusahmbzuai/masc_dev,anonymousdeepcc/DeepCC,bigcode/the-stack-username-to-repo,bigscience/massive-probing-results,dgrnd4/stanford_dog_dataset,gigant/romanian_speech_synthesis_0_8_1,helena-balabin/sentences,icelab/ntrs_meta,joefox/Mozilla_Common_Voice_ru_test_noise,m-aliabbas/idrak_splitted_amy_1,marinone94/nst_sv,mbarnig/lb-de-fr-en-pt-12800-TTS-CORPUS,momilla/Ethereum_transacitons,nev/anime-giph,openclimatefix/nimrod-uk-1km-validation,raghav66/whisper-gpt,strombergnlp/broad_twitter_corpus,z-uo/female-LJSpeech-italian,Champion/vpc2020_clear_anon_speech,DelgadoPanadero/Pokemon,GEM/references,HuggingFaceM4/FairFace,Karavet/ILUR-news-text-classification-corpus,Voicemod/LibriTTS-100-preproc,YWjimmy/PeRFception-v1-1,albertvillanova/TextCaps,allenai/c4,dog/punks,chenghao/scielo_books,YWjimmy/PeRFception-v1-2,bigcode/the-stack-dedup,openclimatefix/era5,Carlisle/msmarco-passage-non-abs,SetFit/mnli,valurank/PoliticalBias_AllSides_Txt,Biomedical-TeMU/ProfNER_corpus_classification,LeoFeng/MLHW_6,pragnakalp/squad_v2_french_translated,textvqa,polinaeterna/vox_lingua,nishita/ade20k-sample,oyk100/ChaSES-data,YWjimmy/PeRFception-v1-3,YWjimmy/PeRFception-ScanNet,ChaiML/AnthropicRLHFPreferenceData,voidful/librispeech_asr_text,Isma/librispeech_1000_seed_42,Graphcore/vqa-lxmert,Tevatron/wikipedia-curated-corpus,adamlin/daily_dialog,cameronbc/synthtiger,clarin-pl/multiwiki_90k,echarlaix/vqa-lxmert,gigant/african_accented_french" + # comma-separated list of the supported datasets. If empty, all the datasets are processed. Defaults to empty. + supportedDatasets: "" + # the maximum size of the supported datasets. Bigger datasets, or datasets that cannot provide the size, are ignored. 
+  maxDatasetSize: "500_000_000" # support up to 500 MB
+  # override the common queue parameters
+  queue:
+    # Maximum number of jobs running at the same time for the same namespace
+    maxJobsPerNamespace: 4
+  nodeSelector:
+    role-datasets-server: "true"
+  replicas: 16
@@ -190,0 +230 @@ firstRows:
+  tolerations: []
diff --git a/chart/static-files/openapi.json b/chart/static-files/openapi.json
index 5625ce8e..d27575e6 100644
--- a/chart/static-files/openapi.json
+++ b/chart/static-files/openapi.json
@@ -815,0 +816,35 @@
+      },
+      "ParquetFilesResponse": {
+        "type": "object",
+        "required": ["parquet_files"],
+        "properties": {
+          "parquet_files": {
+            "type": "array",
+            "items": { "$ref": "#/components/schemas/ParquetFileItem" }
+          }
+        }
+      },
+      "ParquetFileItem": {
+        "type": "object",
+        "required": ["dataset", "config", "split", "url", "filename", "size"],
+        "properties": {
+          "dataset": {
+            "type": "string"
+          },
+          "config": {
+            "type": "string"
+          },
+          "split": {
+            "type": "string"
+          },
+          "url": {
+            "type": "string",
+            "format": "uri"
+          },
+          "filename": {
+            "type": "string"
+          },
+          "size": {
+            "type": "integer"
+          }
+        }
@@ -2018,0 +2054,367 @@
+    "/parquet": {
+      "get": {
+        "summary": "List of parquet files",
+        "description": "The dataset is converted to the parquet format. The endpoint gives the list of the parquet files.",
+        "externalDocs": {
+          "description": "See Parquet (Hub docs)",
+          "url": "https://huggingface.co/docs/datasets-server/parquet"
+        },
+        "operationId": "listParquetFiles",
+        "security": [
+          {},
+          {
+            "HuggingFaceCookie": []
+          },
+          {
+            "HuggingFaceToken": []
+          }
+        ],
+        "parameters": [
+          {
+            "name": "dataset",
+            "in": "query",
+            "description": "The identifier of the dataset on the Hub.",
+            "required": true,
+            "schema": { "type": "string" },
+            "examples": {
+              "glue": { "summary": "a canonical dataset", "value": "glue" },
+              "Helsinki-NLP/tatoeba_mt": {
+                "summary": "a namespaced dataset",
+                "value": "Helsinki-NLP/tatoeba_mt"
+              }
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "A list of parquet files.<br/>Beware: the response is not paginated.",
+            "headers": {
+              "Cache-Control": { "$ref": "#/components/headers/Cache-Control" },
+              "Access-Control-Allow-Origin": {
+                "$ref": "#/components/headers/Access-Control-Allow-Origin"
+              }
+            },
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/ParquetFilesResponse"
+                },
+                "examples": {
+                  "duorc": {
+                    "summary": "duorc: six parquet files, one per split",
+                    "value": {
+                      "parquet_files": [
+                        {
+                          "dataset": "duorc",
+                          "config": "ParaphraseRC",
+                          "split": "test",
+                          "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/ParaphraseRC/duorc-test.parquet",
+                          "filename": "duorc-test.parquet",
+                          "size": 6136590
+                        },
+                        {
+                          "dataset": "duorc",
+                          "config": "ParaphraseRC",
+                          "split": "train",
+                          "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/ParaphraseRC/duorc-train.parquet",
+                          "filename": "duorc-train.parquet",
+                          "size": 26005667
+                        },
+                        {
+                          "dataset": "duorc",
+                          "config": "ParaphraseRC",
+                          "split": "validation",
+                          "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/ParaphraseRC/duorc-validation.parquet",
+                          "filename": "duorc-validation.parquet",
+                          "size": 5566867
+                        },
+                        {
+                          "dataset": "duorc",
+                          "config": "SelfRC",
+                          "split": "test",
+                          "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/SelfRC/duorc-test.parquet",
+                          "filename": "duorc-test.parquet",
+                          "size": 3035735
+                        },
+                        {
+                          "dataset": "duorc",
+                          "config": "SelfRC",
+ "split": "train", + "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/SelfRC/duorc-train.parquet", + "filename": "duorc-train.parquet", + "size": 14851719 + }, + { + "dataset": "duorc", + "config": "SelfRC", + "split": "validation", + "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/SelfRC/duorc-validation.parquet", + "filename": "duorc-validation.parquet", + "size": 3114389 + } + ] + } + }, + "sharded": { + "summary": "alexandrainst/danish-wit: the parquet file for the train split is partitioned into 9 shards", + "value": { + "parquet_files": [ + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "test", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-test.parquet", + "filename": "parquet-test.parquet", + "size": 48781933 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00000-of-00009.parquet", + "filename": "parquet-train-00000-of-00009.parquet", + "size": 937127291 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00001-of-00009.parquet", + "filename": "parquet-train-00001-of-00009.parquet", + "size": 925920565 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00002-of-00009.parquet", + "filename": "parquet-train-00002-of-00009.parquet", + "size": 940390661 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00003-of-00009.parquet", + "filename": "parquet-train-00003-of-00009.parquet", + "size": 934549621 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00004-of-00009.parquet", + "filename": "parquet-train-00004-of-00009.parquet", + "size": 493004154 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00005-of-00009.parquet", + "filename": "parquet-train-00005-of-00009.parquet", + "size": 942848888 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00006-of-00009.parquet", + "filename": "parquet-train-00006-of-00009.parquet", + "size": 933373843 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": 
"https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00007-of-00009.parquet", + "filename": "parquet-train-00007-of-00009.parquet", + "size": 936939176 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00008-of-00009.parquet", + "filename": "parquet-train-00008-of-00009.parquet", + "size": 946933048 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "val", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-val.parquet", + "filename": "parquet-val.parquet", + "size": 11437355 + } + ] + } + } + } + } + } + }, + "401": { + "description": "If the external authentication step on the Hugging Face Hub failed, and no authentication mechanism has been provided. Retry with authentication.", + "headers": { + "Cache-Control": { + "$ref": "#/components/headers/Cache-Control" + }, + "Access-Control-Allow-Origin": { + "$ref": "#/components/headers/Access-Control-Allow-Origin" + }, + "X-Error-Code": { + "$ref": "#/components/headers/X-Error-Code-splits-401" + } + }, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CustomError" + }, + "examples": { + "inexistent-dataset": { + "summary": "The dataset does not exist.", + "value": { + "error": "The dataset does not exist, or is not accessible without authentication (private or gated). Please retry with authentication." + } + }, + "gated-dataset": { + "summary": "The dataset is gated.", + "value": { + "error": "The dataset does not exist, or is not accessible without authentication (private or gated). Please retry with authentication." + } + }, + "private-dataset": { + "summary": "The dataset is private.", + "value": { + "error": "The dataset does not exist, or is not accessible without authentication (private or gated). Please retry with authentication." + } + } + } + } + } + }, + "404": { + "description": "If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access.", + "headers": { + "Cache-Control": { + "$ref": "#/components/headers/Cache-Control" + }, + "Access-Control-Allow-Origin": { + "$ref": "#/components/headers/Access-Control-Allow-Origin" + }, + "X-Error-Code": { + "$ref": "#/components/headers/X-Error-Code-splits-404" + } + }, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CustomError" + }, + "examples": { + "inexistent-dataset": { + "summary": "The dataset does not exist, while authentication was provided in the request.", + "value": { + "error": "The dataset does not exist, or is not accessible with the current credentials (private or gated)." + } + }, + "gated-dataset": { + "summary": "The dataset is private, while authentication was provided in the request.", + "value": { + "error": "The dataset does not exist, or is not accessible with the current credentials (private or gated)." + } + }, + "private-dataset": { + "summary": "The dataset is private, while authentication was provided in the request.", + "value": { + "error": "The dataset does not exist, or is not accessible with the current credentials (private or gated)." 
+ } + } + } + } + } + }, + "422": { + "description": "The `dataset` parameter has not been provided.", + "headers": { + "Cache-Control": { + "$ref": "#/components/headers/Cache-Control" + }, + "Access-Control-Allow-Origin": { + "$ref": "#/components/headers/Access-Control-Allow-Origin" + }, + "X-Error-Code": { + "$ref": "#/components/headers/X-Error-Code-splits-422" + } + }, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CustomError" + }, + "examples": { + "missing-parameter": { + "summary": "The dataset parameter is missing.", + "value": { "error": "Parameter 'dataset' is required" } + }, + "empty-parameter": { + "summary": "The dataset parameter is empty (?dataset=).", + "value": { "error": "Parameter 'dataset' is required" } + } + } + } + } + }, + "500": { + "description": "The server crashed, the response still hasn't been generated (the process is asynchronous), or the response couldn't be generated successfully due to an error in the dataset itself. The client can retry after a time, in particular in the case of the response still being processed. If the error does not vanish, it's possibly due to a bug in the API software or in the dataset, and should be reported.", + "headers": { + "Cache-Control": { + "$ref": "#/components/headers/Cache-Control" + }, + "Access-Control-Allow-Origin": { + "$ref": "#/components/headers/Access-Control-Allow-Origin" + }, + "X-Error-Code": { + "$ref": "#/components/headers/X-Error-Code-splits-500" + } + }, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CustomError" + }, + "examples": { + "not-ready": { + "summary": "the response is not ready yet.", + "value": { + "error": "The response is not ready yet. Please retry later." + } + }, + "internal": { + "summary": "internal error", + "value": { + "error": "Unexpected error." + } + } + } + }, + "text/plain": { + "schema": { + "$ref": "#/components/schemas/ServerErrorResponse" + }, + "examples": { + "internal": { + "summary": "internal error", + "value": { + "error": "Internal Server Error" + } + } + } + } + } + } + } + } + }, diff --git a/chart/templates/_envAssets.tpl b/chart/templates/_envAssets.tpl new file mode 100644 index 00000000..07b4c7fd --- /dev/null +++ b/chart/templates/_envAssets.tpl @@ -0,0 +1,9 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "envAssets" -}} +- name: ASSETS_BASE_URL + value: "{{ include "assets.baseUrl" . }}" +- name: ASSETS_STORAGE_DIRECTORY + value: {{ .Values.assets.storageDirectory | quote }} +{{- end -}} diff --git a/chart/templates/_envCache.tpl b/chart/templates/_envCache.tpl index d5b5ecf4..4b539a53 100644 --- a/chart/templates/_envCache.tpl +++ b/chart/templates/_envCache.tpl @@ -5,2 +4,0 @@ -- name: CACHE_ASSETS_DIRECTORY - value: {{ .Values.cache.assetsDirectory | quote }} diff --git a/chart/templates/_envCommon.tpl b/chart/templates/_envCommon.tpl index 19c75f19..185f168e 100644 --- a/chart/templates/_envCommon.tpl +++ b/chart/templates/_envCommon.tpl @@ -5,2 +4,0 @@ -- name: COMMON_ASSETS_BASE_URL - value: "{{ include "assets.baseUrl" . 
}}" @@ -12 +10 @@ - {{- if .Values.secrets.token.fromSecret }} + {{- if .Values.secrets.appHfToken.fromSecret }} @@ -15 +13 @@ - name: {{ .Values.secrets.token.secretName | quote }} + name: {{ .Values.secrets.appHfToken.secretName | quote }} @@ -19 +17 @@ - value: {{ .Values.secrets.token.value }} + value: {{ .Values.secrets.appHfToken.value }} diff --git a/chart/templates/_envDatasetsBased.tpl b/chart/templates/_envDatasetsBased.tpl new file mode 100644 index 00000000..05d1ed29 --- /dev/null +++ b/chart/templates/_envDatasetsBased.tpl @@ -0,0 +1,12 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "envDatasetsBased" -}} +# the size should remain so small that we don't need to worry about putting it on an external storage +# note that the /tmp directory is not shared among the pods +- name: HF_MODULES_CACHE + value: "/tmp/modules-cache" +- name: NUMBA_CACHE_DIR + value: "/tmp/numba-cache" +{{- end -}} + diff --git a/chart/templates/_envDatasetsWorker.tpl b/chart/templates/_envDatasetsWorker.tpl deleted file mode 100644 index 89c89974..00000000 --- a/chart/templates/_envDatasetsWorker.tpl +++ /dev/null @@ -1,13 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -{{- define "envDatasetsWorker" -}} -- name: HF_DATASETS_CACHE - value: {{ .Values.hfDatasetsCache | quote }} -- name: HF_MODULES_CACHE - value: "/tmp/modules-cache" - # the size should remain so small that we don't need to worry about putting it on an external storage - # see https://github.com/huggingface/datasets-server/issues/248 -- name: NUMBA_CACHE_DIR - value: {{ .Values.numbaCacheDirectory | quote }} -{{- end -}} diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index 4e20e03f..3d9add6e 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -52,0 +53,5 @@ app: "{{ .Release.Name }}-reverse-proxy" +{{- define "labels.storageAdmin" -}} +{{ include "labels" . }} +app: "{{ .Release.Name }}-storage-admin" +{{- end -}} + @@ -77,0 +83,5 @@ app: "{{ include "release" . }}-worker-first-rows" +{{- define "labels.parquet" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-worker-parquet" +{{- end -}} + @@ -96,10 +106 @@ The assets/ subpath in the NFS -The cache/ subpath in the NFS -- in a subdirectory named as the chart (datasets-server/), and below it, -- in a subdirectory named as the Release, so that Releases will not share the same dir -*/}} -{{- define "cache.datasets.subpath" -}} -{{- printf "%s/%s/%s/" .Chart.Name .Release.Name "cache-datasets-2" }} -{{- end }} - -{{/* -The numba-cache/ subpath in the NFS +The datasets library will use this directory as a cache @@ -109,2 +110,2 @@ The numba-cache/ subpath in the NFS -{{- define "cache.numba.subpath" -}} -{{- printf "%s/%s/%s/" .Chart.Name .Release.Name "cache-numba-2" }} +{{- define "cache.subpath" -}} +{{- printf "%s/%s/%s/" .Chart.Name .Release.Name "cache" }} diff --git a/chart/templates/_initContainerCache.tpl b/chart/templates/_initContainerCache.tpl index c56aa404..6df8979b 100644 --- a/chart/templates/_initContainerCache.tpl +++ b/chart/templates/_initContainerCache.tpl @@ -15 +15 @@ - subPath: "{{ include "cache.datasets.subpath" . }}" + subPath: "{{ include "cache.subpath" . 
}}" diff --git a/chart/templates/_volumeMountAssets.tpl b/chart/templates/_volumeMountAssets.tpl index 1112b979..ab4911bb 100644 --- a/chart/templates/_volumeMountAssets.tpl +++ b/chart/templates/_volumeMountAssets.tpl @@ -5 +5 @@ -- mountPath: {{ .Values.cache.assetsDirectory | quote }} +- mountPath: {{ .Values.assets.storageDirectory | quote }} @@ -13 +13 @@ -- mountPath: {{ .Values.cache.assetsDirectory | quote }} +- mountPath: {{ .Values.assets.storageDirectory | quote }} diff --git a/chart/templates/_volumeMountNumbaCache.tpl b/chart/templates/_volumeMountCache.tpl similarity index 50% rename from chart/templates/_volumeMountNumbaCache.tpl rename to chart/templates/_volumeMountCache.tpl index 2bc52f2a..7a01c872 100644 --- a/chart/templates/_volumeMountNumbaCache.tpl +++ b/chart/templates/_volumeMountCache.tpl @@ -4,2 +4,2 @@ -{{- define "volumeMountNumbaCache" -}} -- mountPath: {{ .Values.numbaCacheDirectory | quote }} +{{- define "volumeMountCache" -}} +- mountPath: {{ .Values.cacheDirectory | quote }} @@ -8 +8 @@ - subPath: "{{ include "cache.numba.subpath" . }}" + subPath: "{{ include "cache.subpath" . }}" diff --git a/chart/templates/_volumeMountDatasetsCache.tpl b/chart/templates/_volumeMountDatasetsCache.tpl deleted file mode 100644 index 0377fd2c..00000000 --- a/chart/templates/_volumeMountDatasetsCache.tpl +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -{{- define "volumeMountDatasetsCache" -}} -- mountPath: {{ .Values.hfDatasetsCache | quote }} - mountPropagation: None - name: data - subPath: "{{ include "cache.datasets.subpath" . }}" - readOnly: false -{{- end -}} diff --git a/chart/templates/jobs/mongodb-migration/_container.tpl b/chart/templates/jobs/mongodb-migration/_container.tpl index 7b44e0e9..5e52b507 100644 --- a/chart/templates/jobs/mongodb-migration/_container.tpl +++ b/chart/templates/jobs/mongodb-migration/_container.tpl @@ -28,2 +27,0 @@ - volumeMounts: - {{ include "volumeMountAssetsRO" . | nindent 2 }} diff --git a/chart/templates/jobs/mongodb-migration/job.yaml b/chart/templates/jobs/mongodb-migration/job.yaml index e11dbb25..365a798c 100644 --- a/chart/templates/jobs/mongodb-migration/job.yaml +++ b/chart/templates/jobs/mongodb-migration/job.yaml @@ -21 +20,0 @@ spec: - initContainers: {{ include "initContainerAssets" . | nindent 8 }} diff --git a/chart/templates/reverse-proxy/_container.tpl b/chart/templates/reverse-proxy/_container.tpl index f18e1231..8b7be35c 100644 --- a/chart/templates/reverse-proxy/_container.tpl +++ b/chart/templates/reverse-proxy/_container.tpl @@ -10 +10 @@ - value: {{ .Values.cache.assetsDirectory | quote }} + value: {{ .Values.assets.storageDirectory | quote }} diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl index 2c08de05..ff5e60eb 100644 --- a/chart/templates/services/admin/_container.tpl +++ b/chart/templates/services/admin/_container.tpl @@ -8,0 +9 @@ + {{ include "envAssets" . | nindent 2 }} diff --git a/chart/templates/services/api/_container.tpl b/chart/templates/services/api/_container.tpl index d7d3c713..06b033d6 100644 --- a/chart/templates/services/api/_container.tpl +++ b/chart/templates/services/api/_container.tpl @@ -29 +28,0 @@ - volumeMounts: {{ include "volumeMountAssetsRO" . 
| nindent 2 }} diff --git a/chart/templates/services/api/deployment.yaml b/chart/templates/services/api/deployment.yaml index 0fb76b4d..1d055ee1 100644 --- a/chart/templates/services/api/deployment.yaml +++ b/chart/templates/services/api/deployment.yaml @@ -26 +25,0 @@ spec: - initContainers: {{ include "initContainerAssets" . | nindent 8 }} diff --git a/chart/templates/_initContainerNumbaCache.tpl b/chart/templates/storage-admin/_container.tpl similarity index 51% rename from chart/templates/_initContainerNumbaCache.tpl rename to chart/templates/storage-admin/_container.tpl index 77e94fb7..317cff29 100644 --- a/chart/templates/_initContainerNumbaCache.tpl +++ b/chart/templates/storage-admin/_container.tpl @@ -4,2 +4,2 @@ -{{- define "initContainerNumbaCache" -}} -- name: prepare-numba-cache +{{- define "containerStorageAdmin" -}} +- name: "{{ include "name" . }}-storage-admin" @@ -8,5 +8,2 @@ - command: ["/bin/sh", "-c"] - args: - - chown {{ .Values.uid }}:{{ .Values.gid }} /mounted-path; - volumeMounts: - - mountPath: /mounted-path + volumeMounts: + - mountPath: /data @@ -15 +11,0 @@ - subPath: "{{ include "cache.numba.subpath" . }}" @@ -20,0 +17,4 @@ + resources: {{ toYaml .Values.storageAdmin.resources | nindent 4 }} + command: + - 'sleep' + - 'infinity' diff --git a/chart/templates/storage-admin/deployment.yaml b/chart/templates/storage-admin/deployment.yaml new file mode 100644 index 00000000..5c21e304 --- /dev/null +++ b/chart/templates/storage-admin/deployment.yaml @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: {{ include "labels.storageAdmin" . | nindent 4 }} + name: "{{ include "release" . }}-storage-admin" + namespace: {{ .Release.Namespace }} +spec: + progressDeadlineSeconds: 600 + replicas: {{ .Values.storageAdmin.replicas }} + revisionHistoryLimit: 10 + selector: + matchLabels: {{ include "labels.storageAdmin" . | nindent 6 }} + template: + metadata: + labels: {{ include "labels.storageAdmin" . | nindent 8 }} + spec: + containers: {{ include "containerStorageAdmin" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.storageAdmin.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.storageAdmin.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 6 }} diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index a481b6c4..9b2a98a5 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -6 +6 @@ - image: {{ .Values.dockerImage.workers.firstRows }} + image: {{ .Values.dockerImage.workers.datasets_based }} @@ -11,0 +12 @@ + {{ include "envAssets" . | nindent 2 }} @@ -16 +17,3 @@ - {{ include "envDatasetsWorker" . | nindent 2 }} + {{ include "envDatasetsBased" . | nindent 2 }} + - name: DATASETS_BASED_HF_DATASETS_CACHE + value: {{ printf "%s/first-rows/datasets" .Values.cacheDirectory | quote }} @@ -33,2 +36 @@ - {{ include "volumeMountDatasetsCache" . | nindent 2 }} - {{ include "volumeMountNumbaCache" . | nindent 2 }} + {{ include "volumeMountCache" . | nindent 2 }} diff --git a/chart/templates/worker/first-rows/deployment.yaml b/chart/templates/worker/first-rows/deployment.yaml index 63930848..277ea7af 100644 --- a/chart/templates/worker/first-rows/deployment.yaml +++ b/chart/templates/worker/first-rows/deployment.yaml @@ -26 +25,0 @@ spec: - {{ include "initContainerNumbaCache" . 
| nindent 8 }} diff --git a/chart/templates/worker/parquet/_container.tpl b/chart/templates/worker/parquet/_container.tpl new file mode 100644 index 00000000..59241834 --- /dev/null +++ b/chart/templates/worker/parquet/_container.tpl @@ -0,0 +1,52 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerWorkerParquet" -}} +- name: "{{ include "name" . }}-worker-parquet" + image: {{ .Values.dockerImage.workers.datasets_based }} + imagePullPolicy: IfNotPresent + env: + - name: DATASETS_BASED_ENDPOINT + value: "/parquet" + # ^ hard-coded + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} + {{ include "envWorker" . | nindent 2 }} + {{ include "envDatasetsBased" . | nindent 2 }} + - name: DATASETS_BASED_HF_DATASETS_CACHE + value: {{ printf "%s/parquet/datasets" .Values.cacheDirectory | quote }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} + # overridden + value: {{ .Values.parquet.queue.maxJobsPerNamespace | quote }} + - name: PARQUET_BLOCKED_DATASETS + value: {{ .Values.parquet.blockedDatasets | quote }} + - name: PARQUET_COMMIT_MESSAGE + value: {{ .Values.parquet.commitMessage | quote }} + - name: PARQUET_COMMITTER_HF_TOKEN + {{- if .Values.secrets.userHfToken.fromSecret }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.userHfToken.secretName | quote }} + key: HF_TOKEN + optional: false + {{- else }} + value: {{ .Values.secrets.userHfToken.value }} + {{- end }} + - name: PARQUET_MAX_DATASET_SIZE + value: {{ .Values.parquet.maxDatasetSize | quote }} + - name: PARQUET_SOURCE_REVISION + value: {{ .Values.parquet.sourceRevision | quote }} + - name: PARQUET_SUPPORTED_DATASETS + value: {{ .Values.parquet.supportedDatasets | quote }} + - name: PARQUET_TARGET_REVISION + value: {{ .Values.parquet.targetRevision | quote }} + - name: PARQUET_URL_TEMPLATE + value: {{ .Values.parquet.urlTemplate | quote }} + volumeMounts: + {{ include "volumeMountCache" . | nindent 2 }} + securityContext: + allowPrivilegeEscalation: false + resources: {{ toYaml .Values.parquet.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/worker/parquet/deployment.yaml b/chart/templates/worker/parquet/deployment.yaml new file mode 100644 index 00000000..db27dd94 --- /dev/null +++ b/chart/templates/worker/parquet/deployment.yaml @@ -0,0 +1,29 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: {{ include "labels.parquet" . | nindent 4 }} + name: "{{ include "release" . }}-worker-parquet" + namespace: {{ .Release.Namespace }} +spec: + progressDeadlineSeconds: 600 + replicas: {{ .Values.parquet.replicas }} + revisionHistoryLimit: 10 + selector: + matchLabels: {{ include "labels.parquet" . | nindent 6 }} + strategy: + type: Recreate + template: + metadata: + labels: {{ include "labels.parquet" . | nindent 8 }} + spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: + {{ include "initContainerCache" . | nindent 8 }} + containers: {{ include "containerWorkerParquet" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.parquet.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.parquet.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . 
| nindent 8 }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 86fb4c18..2c25f8c7 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -16 +16,3 @@ - {{ include "envDatasetsWorker" . | nindent 2 }} + {{ include "envDatasetsBased" . | nindent 2 }} + - name: DATASETS_BASED_HF_DATASETS_CACHE + value: {{ printf "%s/splits/datasets" .Values.cacheDirectory | quote }} @@ -22,3 +24 @@ - {{ include "volumeMountAssetsRO" . | nindent 2 }} - {{ include "volumeMountDatasetsCache" . | nindent 2 }} - {{ include "volumeMountNumbaCache" . | nindent 2 }} + {{ include "volumeMountCache" . | nindent 2 }} diff --git a/chart/templates/worker/splits/deployment.yaml b/chart/templates/worker/splits/deployment.yaml index 526b36f6..d0ec3103 100644 --- a/chart/templates/worker/splits/deployment.yaml +++ b/chart/templates/worker/splits/deployment.yaml @@ -24 +23,0 @@ spec: - {{ include "initContainerAssets" . | nindent 8 }} @@ -26 +24,0 @@ spec: - {{ include "initContainerNumbaCache" . | nindent 8 }} diff --git a/chart/values.yaml b/chart/values.yaml index 692b727f..52e2f992 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -10 +10 @@ secrets: - token: + appHfToken: @@ -13,0 +14,4 @@ secrets: + userHfToken: + fromSecret: false + secretName: "hf-token-francky" + value: hf_ @@ -45,2 +49 @@ dockerImage: - splits: "" - firstRows: "" + datasets_based: "" @@ -49,2 +51,0 @@ cache: - # Directory on the shared storage (audio files and images) - assetsDirectory: "/assets" @@ -68 +69 @@ worker: -common: +assets: @@ -70 +71,5 @@ common: - # assetsBaseUrl: "not used for now" + # baseUrl: "not used for now" + # Directory on the shared storage (audio files and images) + storageDirectory: "/assets" + +common: @@ -76,6 +81,2 @@ common: -# Directory where the `datasets` library will store the cached datasets data -hfDatasetsCache: "/hf-datasets-cache" -# Directory where the `datasets` library will store the cached datasets scripts -#hfModulesCache: "not used" -# Directory where the `numba` decorators (used by `librosa`) can write cache -numbaCacheDirectory: "/numba-cache" +# Directory where the cache data will be stored +cacheDirectory: "/datasets-server-cache" @@ -96,0 +98,15 @@ mongodbMigration: +# --- storage admin (to manually inspect the storage, in /data) --- + +storageAdmin: + nodeSelector: {} + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 + service: + type: NodePort + annotations: {} + tolerations: [] + @@ -235,0 +252,28 @@ firstRows: + +parquet: + # comma-separated list of the blocked datasets. Defaults to empty. + blockedDatasets: "" + # the git commit message when the parquet files are uploaded to the Hub. Defaults to `Update parquet files`. + commitMessage: "Update parquet files" + # the maximum size of the supported datasets. Bigger datasets, or datasets that cannot provide the size, are ignored. + maxDatasetSize: "100_000_000" + # the git revision of the dataset to use to prepare the parquet files. Defaults to `main`. + sourceRevision: "main" + # comma-separated list of the supported datasets. If empty, all the datasets are processed. Defaults to empty. + supportedDatasets: "" + # the git revision of the dataset where to store the parquet files. Make sure the hf_token (see the "Common" section) allows to write there. Defaults to `refs/convert/parquet`. + targetRevision: "refs/convert/parquet" + # the URL template to build the parquet file URLs. 
Defaults to `/datasets/%s/resolve/%s/%s`.
+  urlTemplate: "/datasets/%s/resolve/%s/%s"
+  # override the common queue parameters
+  queue:
+    # Maximum number of jobs running at the same time for the same namespace
+    maxJobsPerNamespace: 1
+  nodeSelector: {}
+  replicas: 1
+  resources:
+    requests:
+      cpu: 1
+    limits:
+      cpu: 1
+  tolerations: []
diff --git a/docs/source/_toctree.yml b/docs/source/_toctree.yml
index 2b5b26a4..b35ddb65 100644
--- a/docs/source/_toctree.yml
+++ b/docs/source/_toctree.yml
@@ -14,0 +15,2 @@
+    - local: parquet
+      title: List parquet files
diff --git a/docs/source/index.mdx b/docs/source/index.mdx
index 80bea0e9..abf155b2 100644
--- a/docs/source/index.mdx
+++ b/docs/source/index.mdx
@@ -8,0 +9 @@ Let Datasets Server take care of the heavy lifting so you can:
+* Access the dataset as parquet files.
diff --git a/docs/source/parquet.mdx b/docs/source/parquet.mdx
new file mode 100644
index 00000000..eae81aff
--- /dev/null
+++ b/docs/source/parquet.mdx
@@ -0,0 +1,236 @@
+# List parquet files
+
+Datasets can be published in any format (CSV, JSONL, directories of images, etc.) on the Hub, and people generally use the [`datasets` library](https://huggingface.co/docs/datasets/) to access the data. To make it even easier, the datasets-server automatically converts every dataset to the [Parquet](https://parquet.apache.org/) format and publishes the parquet files on the Hub (in a specific branch: `refs/convert/parquet`).
+
+This guide shows you how to use Datasets Server's `/parquet` endpoint to retrieve the list of a dataset's parquet files programmatically. Feel free to also try it out with [Postman](https://www.postman.com/huggingface/workspace/hugging-face-apis/request/23242779-f0cde3b9-c2ee-4062-aaca-65c4cfdd96f8), [RapidAPI](https://rapidapi.com/hugging-face-hugging-face-default/api/hugging-face-datasets-api), or [ReDoc](https://redocly.github.io/redoc/?url=https://datasets-server.huggingface.co/openapi.json#operation/listParquetFiles)
+
+The `/parquet` endpoint accepts the dataset name as its query parameter:
+
+<inferencesnippet>
+<python>
+```python
+import requests
+headers = {"Authorization": f"Bearer {API_TOKEN}"}
+API_URL = "https://datasets-server.huggingface.co/parquet?dataset=duorc"
+def query():
+    response = requests.request("GET", API_URL, headers=headers)
+    return response.json()
+data = query()
+```
+</python>
+<js>
+```js
+import fetch from "node-fetch";
+async function query(data) {
+    const response = await fetch(
+        "https://datasets-server.huggingface.co/parquet?dataset=duorc",
+        {
+            headers: { Authorization: `Bearer ${API_TOKEN}` },
+            method: "GET"
+        }
+    );
+    const result = await response.json();
+    return result;
+}
+query().then((response) => {
+    console.log(JSON.stringify(response));
+});
+```
+</js>
+<curl>
+```curl
+curl https://datasets-server.huggingface.co/parquet?dataset=duorc \
+        -X GET \
+        -H "Authorization: Bearer ${API_TOKEN}"
+```
+</curl>
+</inferencesnippet>
+
+The endpoint response is a JSON object containing the list of the dataset's parquet files.
For example, the [duorc](https://huggingface.co/datasets/duorc) dataset has six parquet files, which correspond to the `train`, `validation` and `test` splits of its two configurations (see the [/splits](./splits) guide):
+
+```json
+{
+  "parquet_files": [
+    {
+      "dataset": "duorc",
+      "config": "ParaphraseRC",
+      "split": "test",
+      "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/ParaphraseRC/duorc-test.parquet",
+      "filename": "duorc-test.parquet",
+      "size": 6136590
+    },
+    {
+      "dataset": "duorc",
+      "config": "ParaphraseRC",
+      "split": "train",
+      "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/ParaphraseRC/duorc-train.parquet",
+      "filename": "duorc-train.parquet",
+      "size": 26005667
+    },
+    {
+      "dataset": "duorc",
+      "config": "ParaphraseRC",
+      "split": "validation",
+      "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/ParaphraseRC/duorc-validation.parquet",
+      "filename": "duorc-validation.parquet",
+      "size": 5566867
+    },
+    {
+      "dataset": "duorc",
+      "config": "SelfRC",
+      "split": "test",
+      "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/SelfRC/duorc-test.parquet",
+      "filename": "duorc-test.parquet",
+      "size": 3035735
+    },
+    {
+      "dataset": "duorc",
+      "config": "SelfRC",
+      "split": "train",
+      "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/SelfRC/duorc-train.parquet",
+      "filename": "duorc-train.parquet",
+      "size": 14851719
+    },
+    {
+      "dataset": "duorc",
+      "config": "SelfRC",
+      "split": "validation",
+      "url": "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/SelfRC/duorc-validation.parquet",
+      "filename": "duorc-validation.parquet",
+      "size": 3114389
+    }
+  ]
+}
+```
+
+The dataset can then be accessed directly through the parquet files:
+
+```python
+import pandas as pd
+url = "https://huggingface.co/datasets/duorc/resolve/refs%2Fconvert%2Fparquet/ParaphraseRC/duorc-train.parquet"
+pd.read_parquet(url).title.value_counts().head()
+# Dracula                 422
+# The Three Musketeers    412
+# Superman                193
+# Jane Eyre               190
+# The Thing               189
+# Name: title, dtype: int64
+```
+
+## Sharded parquet files
+
+Big datasets are partitioned into parquet files (shards) of about 1 GiB each. The file name gives the index of the shard and the total number of shards.
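As a side note, a small helper (hypothetical, not part of datasets-server) can recover both numbers from a shard's file name. A minimal sketch in Python, assuming the five-digit zero-padding seen in the examples of this guide:

```python
# Hypothetical helper, not part of the datasets-server codebase: parse the shard
# index and shard count from names like "parquet-train-00003-of-00009.parquet".
# The five-digit zero-padding is an assumption based on the examples in this guide.
import re
from typing import Optional, Tuple

SHARD_PATTERN = re.compile(r"-(\d{5})-of-(\d{5})\.parquet$")

def shard_info(filename: str) -> Optional[Tuple[int, int]]:
    """Return (shard_index, shard_count), or None for unsharded files."""
    match = SHARD_PATTERN.search(filename)
    if match is None:
        return None  # e.g. "parquet-test.parquet" is a single, unsharded file
    return int(match.group(1)), int(match.group(2))

assert shard_info("parquet-train-00003-of-00009.parquet") == (3, 9)
assert shard_info("parquet-test.parquet") is None
```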
For example, the `train` split of the [`alexandrainst/danish-wit`](https://datasets-server.huggingface.co/parquet?dataset=alexandrainst/danish-wit) dataset is partitioned into 9 shards, from `parquet-train-00000-of-00009.parquet` to `parquet-train-00008-of-00009.parquet`: + +```json +{ + "parquet_files": [ + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "test", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-test.parquet", + "filename": "parquet-test.parquet", + "size": 48781933 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00000-of-00009.parquet", + "filename": "parquet-train-00000-of-00009.parquet", + "size": 937127291 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00001-of-00009.parquet", + "filename": "parquet-train-00001-of-00009.parquet", + "size": 925920565 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00002-of-00009.parquet", + "filename": "parquet-train-00002-of-00009.parquet", + "size": 940390661 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00003-of-00009.parquet", + "filename": "parquet-train-00003-of-00009.parquet", + "size": 934549621 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00004-of-00009.parquet", + "filename": "parquet-train-00004-of-00009.parquet", + "size": 493004154 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00005-of-00009.parquet", + "filename": "parquet-train-00005-of-00009.parquet", + "size": 942848888 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00006-of-00009.parquet", + "filename": "parquet-train-00006-of-00009.parquet", + "size": 933373843 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00007-of-00009.parquet", + "filename": "parquet-train-00007-of-00009.parquet", + "size": 936939176 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "train", + "url": 
"https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-train-00008-of-00009.parquet", + "filename": "parquet-train-00008-of-00009.parquet", + "size": 946933048 + }, + { + "dataset": "alexandrainst/danish-wit", + "config": "alexandrainst--danish-wit", + "split": "val", + "url": "https://huggingface.co/datasets/alexandrainst/danish-wit/resolve/refs%2Fconvert%2Fparquet/alexandrainst--danish-wit/parquet-val.parquet", + "filename": "parquet-val.parquet", + "size": 11437355 + } + ] +} +``` + +The shards can be concatenated: + +```python +import pandas as pd +import requests +r = requests.get("https://datasets-server.huggingface.co/parquet?dataset=alexandrainst/danish-wit") +j = r.json() +urls = [f['url'] for f in j['parquet_files'] if f['split'] == 'train'] +dfs = [pd.read_parquet(url) for url in urls] +df = pd.concat(dfs) +df.mime_type.value_counts().head() +# image/jpeg 140919 +# image/png 18608 +# image/svg+xml 6171 +# image/gif 1030 +# image/webp 1 +# Name: mime_type, dtype: int64 +``` diff --git a/docs/source/quick_start.mdx b/docs/source/quick_start.mdx index 5b2e58ef..4d41ce04 100644 --- a/docs/source/quick_start.mdx +++ b/docs/source/quick_start.mdx @@ -5,3 +5,4 @@ In this quickstart, you'll learn how to use the Datasets Server's REST API to: -* Check whether a dataset on the Hub is functional. -* Return the configuration and splits of a dataset. -* Preview the first 100 rows of a dataset. +- Check whether a dataset on the Hub is functional. +- Return the configuration and splits of a dataset. +- Preview the first 100 rows of a dataset. +- Access the dataset as parquet files. @@ -11,6 +12,7 @@ Each feature is served through an endpoint summarized in the table below: -| Endpoint | Method | Description | Query parameters | | -|-----------------------------|--------|----------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------|---| -| [/valid](./valid) | GET | Get the list of datasets hosted in the Hub and supported by the datasets server. | none | | -| [/is-valid](./valid) | GET | Check whether a specific dataset is valid. | `dataset`: name of the dataset | | -| [/splits](./splits) | GET | Get the list of configurations and splits of a dataset. | `dataset`: name of the dataset | | -| [/first-rows](./first-rows) | GET | Get the columns (with data type) and first rows of a dataset split. | - `dataset`: name of the dataset<br>- `config`: name of the config<br>- `split`: name of the split | | +| Endpoint | Method | Description | Query parameters | +| --------------------------- | ------ | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | +| [/valid](./valid) | GET | Get the list of datasets hosted in the Hub and supported by the datasets server. | none | +| [/is-valid](./valid) | GET | Check whether a specific dataset is valid. | `dataset`: name of the dataset | +| [/splits](./splits) | GET | Get the list of configurations and splits of a dataset. | `dataset`: name of the dataset | +| [/first-rows](./first-rows) | GET | Get the columns (with data type) and first rows of a dataset split. | - `dataset`: name of the dataset<br>- `config`: name of the config<br>- `split`: name of the split | +| [/parquet](./parquet) | GET | Get the list of parquet files of a dataset. 
| `dataset`: name of the dataset |
@@ -21,3 +23,11 @@ There is no installation or setup required to use Datasets Server.
-
-Sign up for a <a href="https://huggingface.co/join">Hugging Face account</a> if you don't already have one! While you can use Datasets Server without a Hugging Face account, you won't be able to access <a href="https://huggingface.co/docs/hub/datasets-gated">gated datasets</a> like <a href="https://huggingface.co/datasets/mozilla-foundation/common_voice_10_0">CommonVoice</a> and <a href="https://huggingface.co/datasets/imagenet-1k">ImageNet</a> without providing a <a href="https://huggingface.co/settings/tokens">user token</a> which you can find in your user settings.
-
+  Sign up for a <a href="https://huggingface.co/join">Hugging Face account</a>{" "}
+  if you don't already have one! While you can use Datasets Server without a
+  Hugging Face account, you won't be able to access{" "}
+  <a href="https://huggingface.co/docs/hub/datasets-gated">gated datasets</a>{" "}
+  like{" "}
+  <a href="https://huggingface.co/datasets/mozilla-foundation/common_voice_10_0">
+    CommonVoice
+  </a>{" "}
+  and <a href="https://huggingface.co/datasets/imagenet-1k">ImageNet</a> without
+  providing a <a href="https://huggingface.co/settings/tokens">user token</a>{" "}
+  which you can find in your user settings.
@@ -238,0 +249,41 @@ curl https://datasets-server.huggingface.co/first-rows?dataset=rotten_tomatoes&c
+
+## Access parquet files
+
+The datasets-server converts every dataset on the Hub to the [parquet](https://parquet.apache.org/) format. The `/parquet` endpoint returns a JSON list of the parquet URLs for a dataset:
+
+<inferencesnippet>
+<python>
+```python
+import requests
+API_URL = "https://datasets-server.huggingface.co/parquet?dataset=rotten_tomatoes"
+def query():
+    response = requests.request("GET", API_URL)
+    return response.json()
+data = query()
+```
+</python>
+<js>
+```js
+import fetch from "node-fetch";
+async function query(data) {
+    const response = await fetch(
+        "https://datasets-server.huggingface.co/parquet?dataset=rotten_tomatoes",
+        {
+            method: "GET"
+        }
+    );
+    const result = await response.json();
+    return result;
+}
+query().then((response) => {
+    console.log(JSON.stringify(response));
+});
+```
+</js>
+<curl>
+```curl
+curl https://datasets-server.huggingface.co/parquet?dataset=rotten_tomatoes \
+        -X GET
+```
+</curl>
+</inferencesnippet>
diff --git a/docs/source/server.mdx b/docs/source/server.mdx
index da02dfde..fa81222e 100644
--- a/docs/source/server.mdx
+++ b/docs/source/server.mdx
@@ -16 +16 @@ The job queue is a list of jobs stored in a Mongo database that should be comple
-There are two jobs:
+There are three jobs:
@@ -18,2 +18,3 @@ There are two jobs:
-- `splits` corresponds to the `/splits` endpoint. It refreshes a dataset and then returns that dataset's splits and configurations. For every split in the dataset, it'll create a new job.
-- `first-rows` corresponds to the `/first-rows` endpoint. It gets the first 100 rows and columns of a dataset split.
+- `/splits` corresponds to the `/splits` endpoint. It refreshes a dataset and then returns that dataset's splits and configurations. For every split in the dataset, it'll create a new job.
+- `/first-rows` corresponds to the `/first-rows` endpoint. It gets the first 100 rows and columns of a dataset split.
+- `/parquet` corresponds to the `/parquet` endpoint. It downloads the whole dataset, converts it to [parquet](https://parquet.apache.org/) and publishes the parquet files to the Hub.
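Since these jobs are processed asynchronously, a client that calls `/parquet` right after a dataset has been updated may first receive the 500 "response not ready" error described in the OpenAPI spec above. A minimal client-side retry sketch; the error-message check and the fixed delay are assumptions, not a documented contract:

```python
# Sketch of a retry loop for the asynchronous /parquet endpoint. Assumption:
# a pending response is signalled by an HTTP 500 whose JSON error mentions
# "not ready" (see the openapi.json examples above); adjust to your needs.
import time
from typing import Any, Dict

import requests

def poll_parquet_files(dataset: str, attempts: int = 10, delay: float = 10.0) -> Dict[str, Any]:
    url = f"https://datasets-server.huggingface.co/parquet?dataset={dataset}"
    for _ in range(attempts):
        response = requests.get(url)
        if response.status_code == 200:
            return response.json()  # {"parquet_files": [...]}
        try:
            error = response.json().get("error", "")
        except ValueError:  # the 500 response can also be plain text
            error = response.text
        if response.status_code == 500 and "not ready" in error:
            time.sleep(delay)  # the job is still in the queue; retry later
            continue
        response.raise_for_status()  # any other HTTP error is final
        raise RuntimeError(f"unexpected response {response.status_code} for {dataset}")
    raise TimeoutError(f"/parquet response for {dataset} not ready after {attempts} attempts")
```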
diff --git a/docs/source/splits.mdx b/docs/source/splits.mdx index 1a924579..8d72ade9 100644 --- a/docs/source/splits.mdx +++ b/docs/source/splits.mdx @@ -13 +13 @@ Datasets typically have splits and may also have configurations. A *split* is a -This guide shows you how to use Datasets Server's `/split` endpoint to retrieve a dataset's splits and configurations programmatically. Feel free to also try it out with [Postman](https://www.postman.com/huggingface/workspace/hugging-face-apis/request/23242779-f0cde3b9-c2ee-4062-aaca-65c4cfdd96f8), [RapidAPI](https://rapidapi.com/hugging-face-hugging-face-default/api/hugging-face-datasets-api), or [ReDoc](https://redocly.github.io/redoc/?url=https://datasets-server.huggingface.co/openapi.json#operation/listSplits) +This guide shows you how to use Datasets Server's `/splits` endpoint to retrieve a dataset's splits and configurations programmatically. Feel free to also try it out with [Postman](https://www.postman.com/huggingface/workspace/hugging-face-apis/request/23242779-f0cde3b9-c2ee-4062-aaca-65c4cfdd96f8), [RapidAPI](https://rapidapi.com/hugging-face-hugging-face-default/api/hugging-face-datasets-api), or [ReDoc](https://redocly.github.io/redoc/?url=https://datasets-server.huggingface.co/openapi.json#operation/listSplits) @@ -15 +15 @@ This guide shows you how to use Datasets Server's `/split` endpoint to retrieve -The `/split` endpoint accepts the dataset name as it's query parameter: +The `/splits` endpoint accepts the dataset name as its query parameter: diff --git a/e2e/Makefile b/e2e/Makefile index 3b312b8d..5c8cafc2 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -7 +7,2 @@ export COMMON_HF_ENDPOINT := https://hub-ci.huggingface.co -export COMMON_HF_TOKEN := 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 +export COMMON_HF_TOKEN := hf_datasets-server_token +export PARQUET_COMMITTER_HF_TOKEN := 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 diff --git a/e2e/poetry.lock b/e2e/poetry.lock index 0e8dab51..27642a87 100644 --- a/e2e/poetry.lock +++ b/e2e/poetry.lock @@ -75 +75 @@ name = "certifi" -version = "2022.9.24" +version = "2022.12.7" @@ -787,2 +787,2 @@ certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, diff --git a/e2e/pyproject.toml b/e2e/pyproject.toml index 98744dfc..cbd838f3 100644 --- a/e2e/pyproject.toml +++ b/e2e/pyproject.toml @@ -29 +28,0 @@ requires = ["poetry-core>=1.0.0"] -# addopts = "-k 'wip'" diff --git a/e2e/tests/test_11_auth.py b/e2e/tests/test_11_auth.py index 8f250b15..95bed754 100644 --- a/e2e/tests/test_11_auth.py +++ b/e2e/tests/test_11_auth.py @@ -10,0 +11 @@ from .utils import ( + poll_parquet, @@ -57,0 +59,4 @@ def test_split_public_auth( + r_parquet = poll_parquet(dataset, headers=auth_headers[auth]) + error_code_parquet = error_code_splits + assert r_parquet.status_code == response_status_code, log(r_parquet, dataset) + assert r_parquet.headers.get("X-Error-Code") == error_code_parquet, log(r_parquet, dataset) diff --git a/e2e/tests/test_21_api_metrics.py b/e2e/tests/test_21_api_metrics.py index af87a786..4fb326df 100644 --- a/e2e/tests/test_21_api_metrics.py +++ 
b/e2e/tests/test_21_api_metrics.py @@ -33 +33 @@ def test_metrics(): - for endpoint in ["/splits", "/first-rows"]: + for endpoint in ["/splits", "/first-rows", "/parquet"]: diff --git a/e2e/tests/test_31_admin_metrics.py b/e2e/tests/test_31_admin_metrics.py index 206c1aa3..26f620b7 100644 --- a/e2e/tests/test_31_admin_metrics.py +++ b/e2e/tests/test_31_admin_metrics.py @@ -33 +33 @@ def test_metrics(): - for queue in ["/splits", "/first-rows"]: + for queue in ["/splits", "/first-rows", "/parquet"]: @@ -38 +38 @@ def test_metrics(): - for cache_kind in ["/splits", "/first-rows"]: + for cache_kind in ["/splits", "/first-rows", "/parquet"]: diff --git a/e2e/tests/utils.py b/e2e/tests/utils.py index 880b64d8..ac6dd203 100644 --- a/e2e/tests/utils.py +++ b/e2e/tests/utils.py @@ -72,0 +73,4 @@ def post_refresh(dataset: str) -> Response: +def poll_parquet(dataset: str, headers: Headers = None) -> Response: + return poll(f"/parquet?dataset={dataset}", error_field="error", headers=headers) + + diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index d8a9884d..cda294e3 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -83 +83 @@ name = "certifi" -version = "2022.9.24" +version = "2022.12.7" @@ -198 +198 @@ name = "filelock" -version = "3.8.1" +version = "3.8.2" @@ -320 +320 @@ name = "libcommon" -version = "0.5.3" +version = "0.5.10" @@ -338 +338 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" @@ -865 +865 @@ python-versions = "3.9.15" -content-hash = "f50de0b55e954056cf522ef2b71dfa3677488f11668cd1479a478d7e544a9263" +content-hash = "bbaaeaa2093464c1e5204af154045c1b0524614bc886bc543f7ee469572f55b6" @@ -908,2 +908,2 @@ certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, @@ -996,2 +996,2 @@ filelock = [ - {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, - {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, + {file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"}, + {file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"}, @@ -1032 +1032 @@ libcommon = [ - {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, + {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index d095265b..3a7a8e90 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } +libcommon = { path = 
"../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } @@ -29 +28,0 @@ requires = ["poetry-core>=1.0.0"] -#addopts = "-k 'wip'" diff --git a/libs/libcommon/README.md b/libs/libcommon/README.md index 42319f7c..c5f13c48 100644 --- a/libs/libcommon/README.md +++ b/libs/libcommon/README.md @@ -4,0 +5,7 @@ A Python library with common code (cache, queue, workers logic, processing steps +## Assets configuration + +Set the assets (images and audio files stored locally) environment variables to configure the following aspects: + +- `ASSETS_BASE_URL`: base URL for the assets files. Set accordingly to the datasets-server domain, e.g., https://datasets-server.huggingface.co/assets. Defaults to `assets` (TODO: default to an URL). +- `ASSETS_STORAGE_DIRECTORY`: directory where the asset files are stored. Defaults to empty, which means the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. + @@ -9 +15,0 @@ Set the common environment variables to configure the following aspects: -- `COMMON_ASSETS_BASE_URL`: base URL for the assets files. Set accordingly to the datasets-server domain, e.g., https://datasets-server.huggingface.co/assets. Defaults to `assets`. @@ -18 +23,0 @@ Set environment variables to configure the storage of precomputed API responses -- `CACHE_ASSETS_DIRECTORY`: directory where the asset files are stored. Defaults to empty, which means the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. diff --git a/libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl new file mode 100644 index 00000000..2d75199a Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.10.tar.gz b/libs/libcommon/dist/libcommon-0.5.10.tar.gz new file mode 100644 index 00000000..90599cb7 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.10.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl index 3562f67e..efa4b510 100644 Binary files a/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl and b/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.3.tar.gz b/libs/libcommon/dist/libcommon-0.5.3.tar.gz index 8ba33bb3..d28c2ff6 100644 Binary files a/libs/libcommon/dist/libcommon-0.5.3.tar.gz and b/libs/libcommon/dist/libcommon-0.5.3.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.4-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.4-py3-none-any.whl new file mode 100644 index 00000000..5b508cbc Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.4-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.4.tar.gz b/libs/libcommon/dist/libcommon-0.5.4.tar.gz new file mode 100644 index 00000000..828a1f83 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.4.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.5-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.5-py3-none-any.whl new file mode 100644 index 00000000..cec63769 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.5-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.5.tar.gz b/libs/libcommon/dist/libcommon-0.5.5.tar.gz new file mode 100644 index 00000000..236fde04 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.5.tar.gz differ diff --git 
a/libs/libcommon/dist/libcommon-0.5.6-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.6-py3-none-any.whl new file mode 100644 index 00000000..de99fdc8 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.6-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.6.tar.gz b/libs/libcommon/dist/libcommon-0.5.6.tar.gz new file mode 100644 index 00000000..1f9b059f Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.6.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.7-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.7-py3-none-any.whl new file mode 100644 index 00000000..a77d211c Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.7-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.7.tar.gz b/libs/libcommon/dist/libcommon-0.5.7.tar.gz new file mode 100644 index 00000000..843c381e Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.7.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.8-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.8-py3-none-any.whl new file mode 100644 index 00000000..ca1c0248 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.8-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.8.tar.gz b/libs/libcommon/dist/libcommon-0.5.8.tar.gz new file mode 100644 index 00000000..3c12d1aa Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.8.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.9-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.9-py3-none-any.whl new file mode 100644 index 00000000..74f3db82 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.9-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.9.tar.gz b/libs/libcommon/dist/libcommon-0.5.9.tar.gz new file mode 100644 index 00000000..d53f39e4 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.9.tar.gz differ diff --git a/libs/libcommon/poetry.lock b/libs/libcommon/poetry.lock index 1ec23a1e..befc10f7 100644 --- a/libs/libcommon/poetry.lock +++ b/libs/libcommon/poetry.lock @@ -83 +83 @@ name = "certifi" -version = "2022.9.24" +version = "2022.12.7" @@ -894,2 +894,2 @@ certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 09d28453..d00388f9 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.5.3" +version = "0.5.10" @@ -36 +35,0 @@ requires = ["poetry-core>=1.0.0"] -# addopts = "-k 'wip'" diff --git a/libs/libcommon/src/libcommon/asset.py b/libs/libcommon/src/libcommon/asset.py deleted file mode 100644 index 970013e4..00000000 --- a/libs/libcommon/src/libcommon/asset.py +++ /dev/null @@ -1,20 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
-
-import logging
-import os
-from typing import Optional
-
-from appdirs import user_cache_dir  # type:ignore
-
-DATASET_SEPARATOR = "--"
-ASSET_DIR_MODE = 0o755
-
-
-def init_assets_dir(assets_directory: Optional[str] = None) -> str:
-    # set it to the default cache location on the machine, if ASSETS_DIRECTORY is null
-    if assets_directory is None:
-        assets_directory = user_cache_dir("datasets_server_assets")
-    os.makedirs(assets_directory, exist_ok=True)
-    logging.info(f"Assets directory: {assets_directory}")
-    return assets_directory
diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py
index 90ea6102..b5495b17 100644
--- a/libs/libcommon/src/libcommon/config.py
+++ b/libs/libcommon/src/libcommon/config.py
@@ -8 +7,0 @@ from environs import Env
-from libcommon.asset import init_assets_dir
@@ -12,0 +12,16 @@ from libcommon.simple_cache import connect_to_cache_database
+from libcommon.storage import init_dir
+
+
+class AssetsConfig:
+    base_url: str
+    storage_directory: str
+
+    def __init__(self):
+        env = Env(expand_vars=True)
+        with env.prefixed("ASSETS_"):
+            self.base_url = env.str(name="BASE_URL", default="assets")
+            self._storage_directory = env.str(name="STORAGE_DIRECTORY", default=None)
+        self.setup()
+
+    def setup(self):
+        self.storage_directory = init_dir(directory=self._storage_directory, appname="datasets_server_assets")
@@ -16 +30,0 @@ class CommonConfig:
-    assets_base_url: str
@@ -24 +37,0 @@ class CommonConfig:
-        self.assets_base_url = env.str(name="ASSETS_BASE_URL", default="assets")
@@ -36,2 +48,0 @@ class CacheConfig:
-    _assets_directory: Optional[str]
-    assets_directory: str
@@ -44 +54,0 @@ class CacheConfig:
-        self._assets_directory = env.str(name="ASSETS_DIRECTORY", default=None)
@@ -51 +60,0 @@ class CacheConfig:
-        self.assets_directory = init_assets_dir(assets_directory=self._assets_directory)
@@ -91,0 +101 @@ class ProcessingGraphConfig:
+            "/parquet": {"input_type": "dataset"},
diff --git a/libs/libcommon/src/libcommon/dataset.py b/libs/libcommon/src/libcommon/dataset.py
index 67025103..3d4af686 100644
--- a/libs/libcommon/src/libcommon/dataset.py
+++ b/libs/libcommon/src/libcommon/dataset.py
@@ -21 +20,0 @@ DatasetErrorCode = Literal[
-    "PrivateDatasetError",
@@ -80,11 +78,0 @@ class GatedExtraFieldsError(DatasetError):
-class PrivateDatasetError(DatasetError):
-    def __init__(self, message: str, cause: Optional[BaseException] = None):
-        super().__init__(
-            message=message,
-            status_code=HTTPStatus.NOT_FOUND,
-            code="PrivateDatasetError",
-            cause=cause,
-            disclose_cause=False,
-        )
-
-
@@ -110 +98,2 @@ def ask_access(dataset: str, hf_endpoint: str, hf_token: Optional[str]) -> None:
-        token does not give the sufficient access to the dataset (private, for example).
+        token does not give sufficient access to the dataset, or if the dataset is private
+        (private datasets are not supported by the datasets server)
@@ -153 +142,2 @@ def get_dataset_info_for_supported_datasets(
-        token does not give the sufficient access to the dataset (private, for example).
+        token does not give sufficient access to the dataset, or if the dataset is private
+        (private datasets are not supported by the datasets server)
@@ -155,2 +144,0 @@ def get_dataset_info_for_supported_datasets(
-        - ['~libcommon.dataset.PrivateDatasetError']: if the dataset is private, since private datasets
-            are not supported in datasets-server.
@@ -164 +152 @@ def get_dataset_info_for_supported_datasets(
-        raise PrivateDatasetError(f"Dataset '{dataset}' is not supported.")
+        raise DatasetNotFoundError("The dataset does not exist on the Hub, or is private.")
@@ -192 +180,2 @@ def get_dataset_git_revision(
-        token does not give the sufficient access to the dataset (private, for example).
+        token does not give sufficient access to the dataset, or if the dataset is private
+        (private datasets are not supported by the datasets server)
@@ -194,2 +182,0 @@ def get_dataset_git_revision(
-        - ['~libcommon.dataset.PrivateDatasetError']: if the dataset is private, since private datasets
-            are not supported in datasets-server.
@@ -224 +211,2 @@ def check_support(
-        token does not give the sufficient access to the dataset (private, for example).
+        token does not give sufficient access to the dataset, or if the dataset is private
+        (private datasets are not supported by the datasets server)
@@ -226,2 +213,0 @@ def check_support(
-        - ['~libcommon.dataset.PrivateDatasetError']: if the dataset is private, since private datasets
-            are not supported in datasets-server.
diff --git a/libs/libcommon/src/libcommon/storage.py b/libs/libcommon/src/libcommon/storage.py
new file mode 100644
index 00000000..555f9b24
--- /dev/null
+++ b/libs/libcommon/src/libcommon/storage.py
@@ -0,0 +1,44 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 The HuggingFace Authors.
+
+import logging
+import shutil
+from os import PathLike, makedirs
+from typing import Optional, Union
+
+from appdirs import user_cache_dir  # type:ignore
+
+StrPath = Union[str, PathLike[str]]
+
+
+def init_dir(directory: Optional[StrPath] = None, appname: Optional[str] = None) -> StrPath:
+    """Initialize a directory.
+
+    If directory is None, it will be set to the default cache location on the machine (using appname as a key, if
+    not None).
+
+    Args:
+        directory (Optional[Union[str, PathLike[str]]], optional): The directory to initialize. Defaults to None.
+        appname (Optional[str], optional): The name of the application. Used if `directory` is None. Defaults to None.
+
+    Returns:
+        Union[str, PathLike[str]]: The directory.
+    """
+    if directory is None:
+        directory = user_cache_dir(appname=appname)
+        logging.debug(f"Directory defaulting to user-specific cache: {directory}")
+    makedirs(directory, exist_ok=True)
+    logging.debug(f"Directory created at: {directory}")
+    return directory
+
+
+def remove_dir(directory: StrPath) -> None:
+    """Remove a directory.
+
+    If the directory does not exist, don't raise.
+
+    Args:
+        directory (Union[str, PathLike[str]]): The directory to remove.
+ """ + shutil.rmtree(directory, ignore_errors=True) + logging.debug(f"Directory removed: {directory}") diff --git a/libs/libcommon/src/libcommon/worker.py b/libs/libcommon/src/libcommon/worker.py index fe4cb621..052b2a00 100644 --- a/libs/libcommon/src/libcommon/worker.py +++ b/libs/libcommon/src/libcommon/worker.py @@ -170,0 +171,7 @@ class Worker(ABC): + def has_storage(self) -> bool: + # placeholder, to be overridden by workers if needed + return True + + def has_resources(self) -> bool: + return self.has_memory() and self.has_cpu() and self.has_storage() + @@ -181 +188 @@ class Worker(ABC): - if self.has_memory() and self.has_cpu() and self.process_next_job(): + if self.has_resources() and self.process_next_job(): @@ -200 +207 @@ class Worker(ABC): - parameters_for_log = "dataset={dataset}" + ("" if split is None else f"config={config} split={split}") + parameters_for_log = f"dataset={dataset}" + ("" if split is None else f"config={config} split={split}") @@ -212,2 +219,5 @@ class Worker(ABC): - self.process(dataset=dataset, config=config, split=split, force=force) - finished_status = Status.SUCCESS + finished_status = ( + Status.SUCCESS + if self.process(dataset=dataset, config=config, split=split, force=force) + else Status.ERROR + ) @@ -239,0 +250,9 @@ class Worker(ABC): + def get_dataset_git_revision( + self, + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, + ) -> Optional[str]: + """Get the git revision of the dataset repository.""" + return get_dataset_git_revision(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) + @@ -261 +280 @@ class Worker(ABC): - if force or config is None or split is None: + if force: @@ -267 +286 @@ class Worker(ABC): - dataset_git_revision = get_dataset_git_revision( + dataset_git_revision = self.get_dataset_git_revision( @@ -290 +309 @@ class Worker(ABC): - dataset_git_revision = get_dataset_git_revision( + dataset_git_revision = self.get_dataset_git_revision( @@ -296 +315,6 @@ class Worker(ABC): - content = self.compute(dataset=dataset, config=config, split=split, force=force) + try: + self.pre_compute(dataset=dataset, config=config, split=split, force=force) + content = self.compute(dataset=dataset, config=config, split=split, force=force) + finally: + # ensure the post_compute hook is called even if the compute raises an exception + self.post_compute(dataset=dataset, config=config, split=split, force=force) @@ -335,0 +360,10 @@ class Worker(ABC): + def pre_compute( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + force: bool = False, + ) -> None: + """Hook method called before the compute method.""" + pass + @@ -344,0 +379,10 @@ class Worker(ABC): + + def post_compute( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + force: bool = False, + ) -> None: + """Hook method called after the compute method.""" + pass diff --git a/libs/libcommon/tests/test_processing_steps.py b/libs/libcommon/tests/test_processing_steps.py index 3489cbde..ef50121e 100644 --- a/libs/libcommon/tests/test_processing_steps.py +++ b/libs/libcommon/tests/test_processing_steps.py @@ -12,0 +13 @@ def test_default_graph(): + parquet = graph.get_step("/parquet") @@ -15,0 +17 @@ def test_default_graph(): + assert parquet is not None @@ -18,0 +21 @@ def test_default_graph(): + assert parquet.parent is None @@ -21,0 +25 @@ def test_default_graph(): + assert parquet.children == [] @@ -24,0 +29 @@ def test_default_graph(): + assert parquet.get_ancestors() == [] @@ -26 +31 @@ def 
test_default_graph():
-    assert graph.get_first_steps() == [splits]
+    assert graph.get_first_steps() == [splits, parquet]
diff --git a/libs/libcommon/tests/test_storage.py b/libs/libcommon/tests/test_storage.py
new file mode 100644
index 00000000..728a9650
--- /dev/null
+++ b/libs/libcommon/tests/test_storage.py
@@ -0,0 +1,78 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 The HuggingFace Authors.
+
+from pathlib import Path
+from typing import Optional
+
+import pytest
+
+from libcommon.storage import StrPath, init_dir, remove_dir
+
+
[email protected](
+    "has_directory,is_directory_string,has_appname",
+    [
+        (False, False, False),
+        (False, False, True),
+        (False, True, False),
+        (False, True, True),
+        (True, False, False),
+        (True, False, True),
+        (True, True, False),
+        (True, True, True),
+    ],
+)
+def test_init_dir(
+    tmp_path_factory: pytest.TempPathFactory, has_directory: bool, is_directory_string: bool, has_appname: bool
+) -> None:
+    subdirectory = "subdirectory"
+    tmp_path = tmp_path_factory.mktemp("test") / subdirectory
+    appname = "appname" if has_appname else None
+    directory: Optional[StrPath]
+    if not has_directory:
+        directory = None
+        result = init_dir(directory=directory, appname=appname)
+        assert result != directory, result
+        assert subdirectory not in str(result), result
+        assert type(result) is str, result
+        if has_appname:
+            assert appname in str(result), result
+        else:
+            assert appname is None
+    else:
+        directory = str(tmp_path) if is_directory_string else tmp_path
+        result = init_dir(directory=directory, appname=appname)
+        assert result == directory
+        assert subdirectory in str(result), result
+        if appname is not None:
+            assert appname not in str(result), result
+    assert Path(result).exists()
+    assert Path(result).is_dir()
+
+
[email protected](
+    "exists,is_string",
+    [
+        (False, False),
+        (True, False),
+        (False, True),
+        (True, True),
+    ],
+)
+def test_remove_dir(tmp_path_factory: pytest.TempPathFactory, exists: bool, is_string: bool) -> None:
+    subdirectory = "subdirectory"
+    tmp_path = tmp_path_factory.mktemp("test") / subdirectory
+    tmp_file = tmp_path / "file.txt"
+    if exists:
+        tmp_path.mkdir(parents=True, exist_ok=True)
+        tmp_file.touch()
+    assert tmp_path.exists() is exists
+    assert tmp_path.is_dir() is exists
+    assert tmp_file.exists() is exists
+    assert tmp_file.is_file() is exists
+    directory: StrPath = str(tmp_path) if is_string else tmp_path
+    remove_dir(directory)
+    assert tmp_path.exists() is False
+    assert tmp_path.is_dir() is False
+    assert tmp_file.exists() is False
+    assert tmp_file.is_file() is False
diff --git a/libs/libcommon/tests/test_worker.py b/libs/libcommon/tests/test_worker.py
index 86702cb2..33c8d953 100644
--- a/libs/libcommon/tests/test_worker.py
+++ b/libs/libcommon/tests/test_worker.py
@@ -6,0 +7,2 @@ from libcommon.processing_graph import ProcessingStep
+from libcommon.queue import _clean_queue_database
+from libcommon.simple_cache import _clean_cache_database
@@ -9,0 +12,6 @@ from libcommon.worker import Worker, parse_version
[email protected](autouse=True)
+def clean_mongo_database() -> None:
+    _clean_queue_database()
+    _clean_cache_database()
+
+
@@ -10,0 +19,9 @@ class DummyWorker(Worker):
+    # override get_dataset_git_revision to avoid making a request to the Hub
+    def get_dataset_git_revision(
+        self,
+        dataset: str,
+        hf_endpoint: str,
+        hf_token: Optional[str] = None,
+    ) -> Optional[str]:
+        return "0.1.2"
+
@@ -16,0 +34,5 @@ class DummyWorker(Worker):
+class NoStorageWorker(DummyWorker):
+    def has_storage(self)
-> bool: + return False + + @@ -33,0 +56,24 @@ def test_parse_version(string_version: str, expected_major_version: int, should_ +def test_has_storage( + test_processing_step: ProcessingStep, + common_config: CommonConfig, + queue_config: QueueConfig, + worker_config: WorkerConfig, +) -> None: + worker = DummyWorker( + processing_step=test_processing_step, + common_config=common_config, + queue_config=queue_config, + worker_config=worker_config, + version="1.0.0", + ) + assert worker.has_storage() is True + worker = NoStorageWorker( + processing_step=test_processing_step, + common_config=common_config, + queue_config=queue_config, + worker_config=worker_config, + version="1.0.0", + ) + assert worker.has_storage() is False + + @@ -67,2 +113 @@ def test_compare_major_version( -def should_skip_job( - hub_public_csv: str, +def test_should_skip_job( @@ -81 +126,9 @@ def should_skip_job( - dataset = hub_public_csv + dataset = "dataset" + config = "config" + split = "split" + + assert worker.should_skip_job(dataset=dataset, config=config, split=split) is False + # we add an entry to the cache + worker.process(dataset=dataset, config=config, split=split) + assert worker.should_skip_job(dataset=dataset, config=config, split=split) is True + diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 6b3fdf62..78ffd8d1 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -111 +111 @@ name = "certifi" -version = "2022.9.24" +version = "2022.12.7" @@ -356 +356 @@ name = "libcommon" -version = "0.5.3" +version = "0.5.10" @@ -374 +374 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" @@ -928,0 +929,8 @@ python-versions = "*" +[[package]] +name = "types-psutil" +version = "5.9.5.5" +description = "Typing stubs for psutil" +category = "dev" +optional = false +python-versions = "*" + @@ -991 +999 @@ python-versions = "3.9.15" -content-hash = "a2bd3c8191be9b63471f3e41338400451db039c50d15c1afeb8f262fd0071457" +content-hash = "f315ae701811672df27b0efc97cc4141c2c051cc858a1836a429506b27fba778" @@ -1042,2 +1050,2 @@ certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, @@ -1170 +1178 @@ libcommon = [ - {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, + {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, @@ -1660,0 +1669,4 @@ typed-ast = [ +types-psutil = [ + {file = "types-psutil-5.9.5.5.tar.gz", hash = "sha256:4f26fdb2cb064b274cbc6359fba4abf3b3a2993d7d4abc336ad0947568212c62"}, + {file = "types_psutil-5.9.5.5-py3-none-any.whl", hash = "sha256:e576bb81c74f7443b067e94f92435894d5dd561161bec3d6401727b63df009f0"}, +] diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 299c5988..945b52fa 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = 
"../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } @@ -28,0 +29 @@ responses = "^0.21.0" +types-psutil = "^5.9.5" @@ -35 +35,0 @@ requires = ["poetry-core>=1.0.0"] -# addopts = "-k 'wip'" diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 7d2d948e..7a09c57b 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -24 +24,3 @@ def create_app() -> Starlette: - prometheus = Prometheus(processing_steps=processing_steps) + prometheus = Prometheus( + processing_steps=processing_steps, assets_storage_directory=app_config.assets.storage_directory + ) diff --git a/services/admin/src/admin/config.py b/services/admin/src/admin/config.py index 92c42555..c5b1cfd4 100644 --- a/services/admin/src/admin/config.py +++ b/services/admin/src/admin/config.py @@ -7,0 +8 @@ from libcommon.config import ( + AssetsConfig, @@ -47,0 +49 @@ class AppConfig: + assets: AssetsConfig @@ -55,0 +58 @@ class AppConfig: + self.assets = AssetsConfig() diff --git a/services/admin/src/admin/prometheus.py b/services/admin/src/admin/prometheus.py index 14fe0185..b8913016 100644 --- a/services/admin/src/admin/prometheus.py +++ b/services/admin/src/admin/prometheus.py @@ -20,0 +21 @@ from prometheus_client.multiprocess import ( # type: ignore # https://github.co +from psutil import disk_usage @@ -36,0 +38,6 @@ RESPONSES_IN_CACHE_TOTAL = Gauge( +ASSETS_DISK_USAGE = Gauge( + name="assets_disk_usage", + documentation="Usage of the disk where the assets are stored", + labelnames=["type"], + multiprocess_mode="liveall", +) @@ -41,0 +49 @@ class Prometheus: + assets_storage_directory: str @@ -62,0 +71,6 @@ class Prometheus: + # Assets storage metrics + total, used, free, percent = disk_usage(self.assets_storage_directory) + ASSETS_DISK_USAGE.labels(type="total").set(total) + ASSETS_DISK_USAGE.labels(type="used").set(used) + ASSETS_DISK_USAGE.labels(type="free").set(free) + ASSETS_DISK_USAGE.labels(type="percent").set(percent) diff --git a/services/admin/tests/test_prometheus.py b/services/admin/tests/test_prometheus.py index bde3a2e6..44252eec 100644 --- a/services/admin/tests/test_prometheus.py +++ b/services/admin/tests/test_prometheus.py @@ -14 +14,3 @@ def test_prometheus(app_config: AppConfig, processing_steps: List[ProcessingStep - prometheus = Prometheus(processing_steps=processing_steps) + prometheus = Prometheus( + processing_steps=processing_steps, assets_storage_directory=app_config.assets.storage_directory + ) @@ -35,0 +38,5 @@ def test_prometheus(app_config: AppConfig, processing_steps: List[ProcessingStep + + for type in ["total", "used", "free", "percent"]: + assert "assets_disk_usage{" + additional_field + 'type="' + type + '"}' in metrics + assert metrics["assets_disk_usage{" + additional_field + 'type="' + type + '"}'] >= 0 + assert metrics["assets_disk_usage{" + additional_field + 'type="percent"}'] <= 100 diff --git a/services/api/README.md b/services/api/README.md index 5543a2a3..8ecbc695 100644 --- a/services/api/README.md +++ b/services/api/README.md @@ -43 +42,0 @@ See https://huggingface.co/docs/datasets-server -- /assets: return a static asset, ej. 
https://datasets-server.huggingface.co/assets/food101/--/default/train/0/image/2885220.jpg diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 2e74fd97..2d596907 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -111 +111 @@ name = "certifi" -version = "2022.9.24" +version = "2022.12.7" @@ -372 +372 @@ name = "libcommon" -version = "0.5.3" +version = "0.5.10" @@ -390 +390 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" @@ -1033 +1033 @@ python-versions = "3.9.15" -content-hash = "20c63e31801f3652a9b58587ed63d240d790be6c40d313f118f21a4b4e34caca" +content-hash = "0288367cb9b868444331cdbdf2eb6095c5d939a475811c612e30f4cdb7aa8785" @@ -1084,2 +1084,2 @@ certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, @@ -1216 +1216 @@ libcommon = [ - {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, + {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index feb2441b..b857ffa4 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } @@ -36 +35,0 @@ requires = ["poetry-core>=1.0.0"] -# addopts = "-k 'wip'" diff --git a/services/api/src/api/app.py b/services/api/src/api/app.py index 49621535..6cf24221 100644 --- a/services/api/src/api/app.py +++ b/services/api/src/api/app.py @@ -11,2 +11 @@ from starlette.middleware.gzip import GZipMiddleware -from starlette.routing import BaseRoute, Mount, Route -from starlette.staticfiles import StaticFiles +from starlette.routing import BaseRoute, Route @@ -86,9 +85 @@ def create_app() -> Starlette: - for_development_only: List[BaseRoute] = [ - # it can only be accessed in development. 
In production the reverse-proxy serves the assets
-        Mount(
-            "/assets",
-            app=StaticFiles(directory=app_config.cache.assets_directory, check_dir=True),
-            name="assets",
-        ),
-    ]
-    routes: List[BaseRoute] = valid + processing_steps + to_protect + protected + for_development_only
+    routes: List[BaseRoute] = valid + processing_steps + to_protect + protected
diff --git a/services/api/tests/test_app_real.py b/services/api/tests/test_app_real.py
index 26e31700..28958063 100644
--- a/services/api/tests/test_app_real.py
+++ b/services/api/tests/test_app_real.py
@@ -47 +46,0 @@ def real_clean_mongo_databases(real_app_config: AppConfig) -> None:
[email protected]
diff --git a/services/reverse-proxy/README.md b/services/reverse-proxy/README.md
index 2c0a7355..2801bf44 100644
--- a/services/reverse-proxy/README.md
+++ b/services/reverse-proxy/README.md
@@ -17 +17 @@ It takes various environment variables, all of them are mandatory:
-- `COMMON_ASSETS_DIRECTORY`: the directory that contains the static assets, eg `/assets`
+- `ASSETS_DIRECTORY`: the directory that contains the static assets, e.g., `/assets`
@@ -25 +25 @@ The image requires three directories to be mounted (from volumes):
-- `$COMMON_ASSETS_DIRECTORY` (read-only): the directory that contains the static assets.
+- `$ASSETS_DIRECTORY` (read-only): the directory that contains the static assets.
diff --git a/tools/PythonTest.mk b/tools/PythonTest.mk
index c6ebbb17..b4c8e48d 100644
--- a/tools/PythonTest.mk
+++ b/tools/PythonTest.mk
@@ -4 +4,7 @@ test:
-	poetry run python -m pytest -vv -x tests
+	poetry run python -m pytest -vv -x ${ADDOPTS} tests
+	$(MAKE) down
+
+.PHONY: debug
+debug:
+	$(MAKE) up
+	poetry run python -m pytest -vv -x --log-cli-level=DEBUG --capture=tee-sys --pdb ${ADDOPTS} tests
diff --git a/tools/docker-compose-base.yml b/tools/docker-compose-base.yml
index 6388202c..1613fbf2 100644
--- a/tools/docker-compose-base.yml
+++ b/tools/docker-compose-base.yml
@@ -6 +5,0 @@ services:
-      COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy
@@ -13 +11,0 @@ services:
-      CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets}
@@ -29,2 +27,2 @@ services:
-      HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache}
-      HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache}
+      DATASETS_BASED_HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache}
+      HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache}
diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml
index 396d394f..ffcc7a8e 100644
--- a/tools/docker-compose-datasets-server.yml
+++ b/tools/docker-compose-datasets-server.yml
@@ -7 +7 @@ services:
-      - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro
+      - assets:${ASSETS_STORAGE_DIRECTORY-/assets}:ro
@@ -12 +12 @@ services:
-      ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets}
+      ASSETS_DIRECTORY: ${ASSETS_STORAGE_DIRECTORY-/assets}
@@ -51,2 +50,0 @@ services:
-    volumes:
-      - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro
@@ -87,4 +84,0 @@ services:
-      HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411
-      HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache}
-      HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache}
-      NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache}
@@ -99,0 +94 @@ services:
+      - assets:${ASSETS_STORAGE_DIRECTORY-/assets}:rw
@@ -106,0 +102,2 @@ services:
+      ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with
the reverse-proxy
+      ASSETS_STORAGE_DIRECTORY: ${ASSETS_STORAGE_DIRECTORY-/assets}
@@ -115,0 +113,25 @@ services:
+  worker-parquet:
+    # build:
+    #   context: ..
+    #   dockerfile: workers/parquet/Dockerfile
+    image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided}
+    volumes:
+      - parquet-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw
+      - parquet-modules-cache:${HF_MODULES_CACHE-/modules-cache}:rw
+      - parquet-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw
+    extends:
+      file: docker-compose-base.yml
+      service: datasets-worker
+    environment:
+      DATASETS_BASED_ENDPOINT: "/parquet" # hard-coded
+      PARQUET_BLOCKED_DATASETS: ${PARQUET_BLOCKED_DATASETS-}
+      PARQUET_COMMIT_MESSAGE: ${PARQUET_COMMIT_MESSAGE-Update parquet files}
+      PARQUET_COMMITTER_HF_TOKEN: ${PARQUET_COMMITTER_HF_TOKEN-}
+      PARQUET_MAX_DATASET_SIZE: ${PARQUET_MAX_DATASET_SIZE-100_000_000}
+      PARQUET_SOURCE_REVISION: ${PARQUET_SOURCE_REVISION-main}
+      PARQUET_SUPPORTED_DATASETS: ${PARQUET_SUPPORTED_DATASETS-}
+      PARQUET_TARGET_REVISION: ${PARQUET_TARGET_REVISION-refs/convert/parquet}
+      PARQUET_URL_TEMPLATE: ${PARQUET_URL_TEMPLATE-/datasets/%s/resolve/%s/%s}
+    depends_on:
+      - mongodb
+    restart: always
@@ -131,0 +154,3 @@ volumes:
+  parquet-datasets-cache:
+  parquet-modules-cache:
+  parquet-numba-cache:
diff --git a/workers/datasets_based/README.md b/workers/datasets_based/README.md
index 1da8d12a..a22cee6b 100644
--- a/workers/datasets_based/README.md
+++ b/workers/datasets_based/README.md
@@ -3 +3 @@
-> Worker that pre-computes and caches the response to /splits
+> Worker that pre-computes and caches the response to /splits, /first-rows, or /parquet.
@@ -7 +7 @@
-The worker can be configured using environment variables. They are grouped by scope.
+Use environment variables to configure the worker. The prefix of each environment variable gives its scope.
@@ -11 +11 @@ The worker can be configured using environment variables. They are grouped by sc
-The same worker is used for different endpoints to reuse shared code and dependencies. But at runtime, the worker is assigned only one endpoint. The endpoint is configured using the `DATASETS_BASED_ENDPOINT` environment variable:
+Set environment variables to configure the datasets-based worker (`DATASETS_BASED_` prefix):
@@ -13 +13,18 @@ The same worker is used for different endpoints to reuse shared code and depende
-- `DATASETS_BASED_ENDPOINT`: the endpoint on which the worker will work (pre-compute and cache the response). It can only be `/splits` at the moment.
+- `DATASETS_BASED_ENDPOINT`: the endpoint on which the worker will work (pre-compute and cache the response). The same worker is used for different endpoints to reuse shared code and dependencies, but at runtime, the worker is assigned only one endpoint. Allowed values: `/splits`, `/first-rows`, and `/parquet`. Defaults to `/splits`.
+- `DATASETS_BASED_HF_DATASETS_CACHE`: directory where the `datasets` library will store the cached datasets' data. If not set, the datasets library will choose the default location. Defaults to None. See the sketch after this list for how this variable is read.
+
+Also, set the modules cache configuration for the datasets-based worker. See [../../libs/libcommon/README.md](../../libs/libcommon/README.md). Note that this variable has no `DATASETS_BASED_` prefix:
+
+- `HF_MODULES_CACHE`: directory where the `datasets` library will store the cached dataset scripts. If not set, the datasets library will choose the default location. Defaults to None.
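+
+As an illustration, here is a minimal sketch (not the worker code itself; the path below is hypothetical) of how `DATASETS_BASED_HF_DATASETS_CACHE` is read by the `DatasetsBasedConfig` class in `src/datasets_based/config.py`:
+
+```python
+import os
+
+from datasets_based.config import DatasetsBasedConfig
+
+# Hypothetical value, for illustration only.
+os.environ["DATASETS_BASED_HF_DATASETS_CACHE"] = "/tmp/my-datasets-cache"
+
+config = DatasetsBasedConfig()
+print(config.hf_datasets_cache)  # PosixPath('/tmp/my-datasets-cache')
+```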
+
+### Numba library
+
+Numba requires setting the `NUMBA_CACHE_DIR` environment variable to a writable directory to cache the compiled functions. Required on cloud infrastructure (see https://stackoverflow.com/a/63367171/7351594):
+
+- `NUMBA_CACHE_DIR`: directory where the `numba` decorators (used by `librosa`) can write cache.
+
+### Huggingface_hub library
+
+If the Hub is not https://huggingface.co (i.e., if you set the `COMMON_HF_ENDPOINT` environment variable), you must set the `HF_ENDPOINT` environment variable to the same value. See https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 for more details:
+
+- `HF_ENDPOINT`: the URL of the Hub. Defaults to `https://huggingface.co`.
@@ -17 +34,3 @@ The same worker is used for different endpoints to reuse shared code and depende
-Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`: set environment variables to configure the first rows worker (`FIRST_ROWS_` prefix):
+Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`.
+
+Set environment variables to configure the first rows worker (`FIRST_ROWS_` prefix):
@@ -19 +38 @@ Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`: set envi
-- `FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE`: the maximum size in bytes of the dataset to fallback in normal mode if streaming fails. Note that it requires to have the size in the info metadata. Set to `0` to disable the fallback. Defaults to `100_000_000`.
+- `FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE`: the maximum size in bytes of the dataset to fall back to normal mode if streaming fails. Note that this requires the dataset size to be present in the info metadata. Set to `0` to disable the fallback. Defaults to `100_000_000`.
@@ -21 +40 @@ Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`: set envi
-- `FIRST_ROWS_MAX_NUMBER`: the max number of rows fetched by the worker for the split, and provided in the /first-rows endpoint response. Defaults to `100`.
+- `FIRST_ROWS_MAX_NUMBER`: the max number of rows fetched by the worker for the split and provided in the /first-rows endpoint response. Defaults to `100`.
@@ -23 +42,7 @@ Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`: set envi
-- `FIRST_ROWS_MIN_NUMBER`: the min number of rows fetched by the worker for the split, and provided in the /first-rows endpoint response. Defaults to `10`.
+- `FIRST_ROWS_MIN_NUMBER`: the min number of rows fetched by the worker for the split and provided in the /first-rows endpoint response. Defaults to `10`.
+
+Also, set the assets-related configuration for the first-rows worker. See [../../libs/libcommon/README.md](../../libs/libcommon/README.md).
+
+### Parquet worker
+
+Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/parquet`.
@@ -25 +50 @@ Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`: set envi
-### Datasets library
+Set environment variables to configure the parquet worker (`PARQUET_` prefix):
@@ -27 +52,8 @@ Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`: set envi
-The following environment variables are used to configure two dependencies: the `datasets` and `numba` libraries:
+- `PARQUET_BLOCKED_DATASETS`: comma-separated list of the blocked datasets. If empty, no dataset is blocked. Defaults to empty.
+- `PARQUET_COMMIT_MESSAGE`: the git commit message when the worker uploads the parquet files to the Hub. Defaults to `Update parquet files`.
+- `PARQUET_COMMITTER_HF_TOKEN`: the user token (https://huggingface.co/settings/tokens) used to commit the parquet files to the Hub. The user must be allowed to create the `refs/convert/parquet` branch (see `PARQUET_TARGET_REVISION`; members of the [Hugging Face organization](https://huggingface.co/huggingface) have this right) and to push to it (members of [Datasets maintainers](https://huggingface.co/datasets-maintainers) have this right), i.e., the token must have write permission. If not set, the worker will fail. Defaults to None.
+- `PARQUET_MAX_DATASET_SIZE`: the maximum size in bytes of a dataset for which the parquet files are pre-computed. Bigger datasets, and datasets whose size is unknown, are ignored. Defaults to `100_000_000`.
+- `PARQUET_SOURCE_REVISION`: the git revision of the dataset to use to prepare the parquet files. Defaults to `main`.
+- `PARQUET_SUPPORTED_DATASETS`: comma-separated list of the supported datasets. The worker does not test the size of supported datasets against the maximum dataset size. Defaults to empty.
+- `PARQUET_TARGET_REVISION`: the git revision of the dataset where the parquet files are stored. Make sure the committer token (`PARQUET_COMMITTER_HF_TOKEN`) has the permission to write there. Defaults to `refs/convert/parquet`.
+- `PARQUET_URL_TEMPLATE`: the URL template used to build the parquet file URLs. Defaults to `/datasets/%s/resolve/%s/%s`. See the sketch at the end of this README for how these variables are read.
@@ -29,3 +61 @@ The following environment variables are used to configure two dependencies: the
-- `HF_DATASETS_CACHE`: directory where the `datasets` library will store the cached datasets data. Defaults to `~/.cache/huggingface/datasets`.
-- `HF_MODULES_CACHE`: directory where the `datasets` library will store the cached datasets scripts. Defaults to `~/.cache/huggingface/modules`.
-- `NUMBA_CACHE_DIR`: directory where the `numba` decorators (used by `librosa`) can write cache. Required on cloud infrastructure (see https://stackoverflow.com/a/63367171/7351594).
+### Splits worker
@@ -33 +63 @@ The following environment variables are used to configure two dependencies: the
-If the Hub is not https://huggingface.co (i.e. if you set the `COMMON_HF_ENDPOINT` environment variable), you should also set the `HF_ENDPOINT` environment variable to the same value. See https://github.com/huggingface/datasets/pull/5196 for more details.
+The splits worker does not need any additional configuration.
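+
+### Example: reading the configuration
+
+As an illustration, here is a minimal sketch (not part of the worker code; the values below are hypothetical) of how the `PARQUET_` variables are read through the `ParquetConfig` class in `src/datasets_based/config.py`:
+
+```python
+import os
+
+from datasets_based.config import ParquetConfig
+
+# Hypothetical values, for illustration only.
+os.environ["PARQUET_MAX_DATASET_SIZE"] = "100_000_000"
+os.environ["PARQUET_TARGET_REVISION"] = "refs/convert/parquet"
+
+config = ParquetConfig()
+print(config.max_dataset_size)  # 100000000 (underscores are accepted as separators)
+print(config.target_revision)   # refs/convert/parquet
+```
+
+Each scope follows the same pattern: a configuration class reads its variables with an `environs` prefix (`env.prefixed("PARQUET_")`, etc.).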
diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index 00104cf7..14587a63 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -249 +249 @@ name = "certifi" -version = "2022.9.24" +version = "2022.12.7" @@ -780,0 +781,13 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag +[[package]] +name = "inflate64" +version = "0.3.1" +description = "deflate64 compression/decompression library" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +check = ["check-manifest", "flake8", "flake8-black", "flake8-deprecated", "isort (>=5.0.3)", "mypy (>=0.940)", "mypy-extensions (>=0.4.1)", "pygments", "readme-renderer", "twine"] +docs = ["docutils", "sphinx (>=5.0)"] +test = ["pyannotate", "pytest"] + @@ -877 +890 @@ name = "libcommon" -version = "0.5.3" +version = "0.5.10" @@ -895 +908 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl" @@ -1438 +1451 @@ name = "py7zr" -version = "0.17.4" +version = "0.20.2" @@ -1442 +1455 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -1446,0 +1460 @@ brotlicffi = {version = ">=1.0.9.2", markers = "platform_python_implementation = +inflate64 = {version = ">=0.3.1", markers = "python_version > \"3.6\""} @@ -1448 +1462,2 @@ multivolumefile = ">=0.2.3" -pybcj = {version = ">=0.5.0", markers = "platform_python_implementation == \"CPython\""} +psutil = "*" +pybcj = ">=0.6.0" @@ -1450 +1465 @@ pycryptodomex = ">=3.6.6" -pyppmd = ">=0.17.0" +pyppmd = ">=0.18.1,<1.1.0" @@ -1455 +1470 @@ texttable = "*" -check = ["check-manifest", "flake8", "flake8-black", "flake8-deprecated", "isort (>=5.0.3)", "mypy (>=0.812)", "mypy-extensions (>=0.4.1)", "pygments", "readme-renderer", "twine"] +check = ["check-manifest", "flake8 (<5)", "flake8-black", "flake8-deprecated", "flake8-isort", "isort (>=5.0.3)", "mypy (>=0.940)", "mypy-extensions (>=0.4.1)", "pygments", "readme-renderer", "twine"] @@ -1457 +1472 @@ debug = ["pytest", "pytest-leaks", "pytest-profiling"] -docs = ["docutils", "sphinx (>=2.3)", "sphinx-a4doc", "sphinx-py3doc-enhanced-theme"] +docs = ["docutils", "sphinx (>=5.0)", "sphinx-a4doc", "sphinx-py3doc-enhanced-theme"] @@ -1888,8 +1902,0 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -[[package]] -name = "sklearn" -version = "0.0.post1" -description = "deprecated sklearn package, use scikit-learn instead" -category = "main" -optional = false -python-versions = "*" - @@ -2289,0 +2297,8 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=5.2,<6.0)", "isort (>=5.0.6,<6. 
+[[package]] +name = "types-psutil" +version = "5.9.5.5" +description = "Typing stubs for psutil" +category = "dev" +optional = false +python-versions = "*" + @@ -2444 +2459 @@ python-versions = "3.9.15" -content-hash = "725e9a80c187974440610ef2c87ae0d2505dc02d8605baa8e889a0b62d3fdb97" +content-hash = "412449e8599d28918b44d286a04565b94cd5bc5d6a49bce47fb447e6ae902f14" @@ -2752,2 +2767,2 @@ certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, @@ -3239,0 +3255,71 @@ importlib-metadata = [ +inflate64 = [ + {file = "inflate64-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d4e2a337c6c03b0e96ccd79940cbb04fe2063974d56fff6d78f8d57839546c57"}, + {file = "inflate64-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c142fbbbfbe0877fe821ff8bc4cc10f96d344b7400721579b3d17deeae28f59"}, + {file = "inflate64-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3a17e1dd1a5a872edfc02bc4a048868ada4865a3f4ee3ad5d224b192f2e53df7"}, + {file = "inflate64-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf41f82dd4e90e8684c7be4583d7232bd800a561f3ed0241c84e39148861887"}, + {file = "inflate64-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6059eaba5044739ad6424588e845bd856f89a1a18f1addc31b97c49f02f68728"}, + {file = "inflate64-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c5b2eb7e89d550d287774dea7d429ee24ce44ca34499a6cef113a14f108e700"}, + {file = "inflate64-0.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1d861fed6b2098d1862b64db9df650b9bd41fc41caa9fcaeee399079342aa4a8"}, + {file = "inflate64-0.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e32a78c81afba5699569c3493066ecb38fb45ccdf4c35b3c2232c9c2585b5257"}, + {file = "inflate64-0.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42a6ef375b3e7059bd52993a0938f2bf97725cb5dc380f0c4dbaa9fc3780e025"}, + {file = "inflate64-0.3.1-cp310-cp310-win32.whl", hash = "sha256:664929528047b6b472852a4c0d12b4b9cf6e663059ba64ebd10f08aa56365755"}, + {file = "inflate64-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:08c3b03514d4b849901762a32a45eeba7fd5d784fec698eca6975f41cca33672"}, + {file = "inflate64-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c71821f93c931ae379cf9c9bbdd7099738fa00802ccf2a5271e2b68bc67a6ab8"}, + {file = "inflate64-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3bacbe9d4b7c185011b59268223a010ed777a28ed8cf40efc74fab1b7262e904"}, + {file = "inflate64-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:130dfdca4bd38e588ea4f878bf62635e36f83ddf7f2842d1055d1c16a11890cf"}, + {file = "inflate64-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80a125dd5cb7b7985c05a78b0bfd7751249d0d84fc330901dbd9faa693e1f53f"}, + {file = "inflate64-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67efdfd21d7b99f30a43560b22264c1e580ff08ae9831e78c99445575962dbc7"}, + {file = 
"inflate64-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad84ac611eae17a961124c5fbe754b6982291a3945ab2b9c334a08e2e56f9ccc"}, + {file = "inflate64-0.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a1b481343f12641b1ae7a19135a70c44ecf020dccb670e02522c2b02db920851"}, + {file = "inflate64-0.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ad4cae5097bdff7e0bb1ab676d86ad08716597baa3b616e5b710a724f5d5cbc4"}, + {file = "inflate64-0.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:906a4b57df32f903e847766ca685e44ed3e7ee3a960fa94264d5e68b836d446d"}, + {file = "inflate64-0.3.1-cp311-cp311-win32.whl", hash = "sha256:0b0c8aa2fcdb1052d3bc6c5b5b1191b9c708d30e47af98ba0a8117ae1f6c9efc"}, + {file = "inflate64-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:473e0081c268ffa4b18683586b55170eb96d8b4fc684dd3ed9599c17c512d2e4"}, + {file = "inflate64-0.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f6737a575c6e7e818963d95a998be4c91484374961734cee97265f3c4c3b979"}, + {file = "inflate64-0.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c913b679f023f5907a54bfa9a6e438407ed4e40eee23ed19b4118128bdd091c"}, + {file = "inflate64-0.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29946840e6970d68e7739207ca21140c59ffebe7e02d28c7e86348166ce32418"}, + {file = "inflate64-0.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ba954600441eafe8f6f54eadffeac4d1ab2416d5d1a6b0ab403e50284ba457b"}, + {file = "inflate64-0.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2a4dac4ebc4ad58a4ac911e39cf97cd74906c0c82c16333887aa9f287e98d5b"}, + {file = "inflate64-0.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7b7966193f1bf23e050af72b4c4720dffa5f33471de7afea37ba0d0f0195adef"}, + {file = "inflate64-0.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7f8346e644de449a4a90dcb22971dea456398b6cc788102013675b11256ae47e"}, + {file = "inflate64-0.3.1-cp37-cp37m-win32.whl", hash = "sha256:f39b57974db0e85897fff40518da420f4c4012b73515ca6f415a472228fea288"}, + {file = "inflate64-0.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:74ceb9d172ce06572632bc8070d54b963455421e216013575383f991e722bb7d"}, + {file = "inflate64-0.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c28cb635ccb9aae399fbc8e82c85b89ea0a7bb2219e7d582bbc007a29fb6e149"}, + {file = "inflate64-0.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9297115bf144c585e9d6a746e851c64c81d8f1ce8b62da4885babe66c36f7d29"}, + {file = "inflate64-0.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a075b174bace5174828906c7c87019a2af3cc5707025f01ee0395fb4b88fd98e"}, + {file = "inflate64-0.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa7476129e7f81e67a9253470c3085a9fd75ec77e6fae3de61f7795138ce725e"}, + {file = "inflate64-0.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35e24ffd8d6225fbfe26c524b45ace1bb8956811bd79e9f3d523a721d51b0d4e"}, + {file = "inflate64-0.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:553cd992f02af574d2116c74ca48d7cf10894c6b9ba8159f488f3bfac3c201ae"}, + {file = "inflate64-0.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:82393e46b8ba2f8613d030f38c7c492b0896ff8803f7ff870677f25d3e5e7113"}, + {file = "inflate64-0.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:67e37d96ea2ee8257b12cde83a09e4f0276950268a7a2f777aee7de60db5ec72"}, + {file = "inflate64-0.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09dd0f8d6dee0da467c264dbd9bca8b33f9c915860fc3385f2a633640a65bd10"}, + {file = "inflate64-0.3.1-cp38-cp38-win32.whl", hash = "sha256:26e8319fd032c520203e2c001f1693c1c03774d85915900427e884011718f41d"}, + {file = "inflate64-0.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:ab8f9e14ba6495f440101751ba8aa371e4a52941b5e343c6f3e8c61021e2df5e"}, + {file = "inflate64-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:094ef56a87c7b7398d93af7bfe7f24f830f24b6e55b77426f6516cef43e05460"}, + {file = "inflate64-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:48fd2527a462374dc19be06301d6aa30a03190532f2f8bddfbc39b7158561750"}, + {file = "inflate64-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fde3f85864c84badb26f42d95639360e627fd09c529a76c46a06dbd7a5735c51"}, + {file = "inflate64-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5efd55c21b794601fd44b99b8e2f17498744f573116ce27a745bc5e08f0457e1"}, + {file = "inflate64-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71af8b23ac23bc9e9f776451c125be6320ad4589a7d5bcb5ab5e1fc61b4e58f"}, + {file = "inflate64-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ced0af509a31dcba0cd98ecdd06cb7c9ce66ebde78e0d99ba3515d4e991e34d0"}, + {file = "inflate64-0.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853f3442eceda8035072686533694ab833c4293d10c9d0685147200f0e964356"}, + {file = "inflate64-0.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a6bec3d2f30f6f2656e1c5a4147181e401c8d7026cd598d86ad5647c616fc618"}, + {file = "inflate64-0.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:84287d1d09fd879353d3ccadd43f3d8adea75e830476ddfd46d8849d36d25afe"}, + {file = "inflate64-0.3.1-cp39-cp39-win32.whl", hash = "sha256:a2f4aaa02f9a5ada944960428b6528a0a9d773925efc73485882f34bf42654be"}, + {file = "inflate64-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ff89f94823b2466bae45759fc324bd25bd20c490607a7d8407237cf64ccafa9"}, + {file = "inflate64-0.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c1faf43890dbfff31195f5d59e37e49824f5ff4be77d67f7144a6b953bbde51c"}, + {file = "inflate64-0.3.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1749da3a02b53035cde1cf95f885e78e0c2c49b201e97d368b3ba97e0f3d42c3"}, + {file = "inflate64-0.3.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17aaac096f40bd80dd72481831607a0846271d401ba3cd863386b8c244c7ebc1"}, + {file = "inflate64-0.3.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d807cfa9ddad940401ef04502eb367a77f569850f59c2e71670347d558a3830"}, + {file = "inflate64-0.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b7aa123c740f2f9798f72873e50d7c6d43664d12cad7a1405296079987bdb04a"}, + {file = "inflate64-0.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:91233b5300bbb7562804c3d07617e9ce2983e8434218991db98ef175491e417f"}, + {file = "inflate64-0.3.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:525bc309d8533ef9917e006284996ee7a9a71ac6dd19fb57c0f741ad0c805d4f"}, + {file = "inflate64-0.3.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90f95b92d0f672d11151cb964964d1723e2e3ce3a19d32d24aece1acdec1e287"}, + {file = 
"inflate64-0.3.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41504988023042452d2d84e4110c9ef4ff8ebd33cb90ba83e44b92c9a6753c43"}, + {file = "inflate64-0.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3c270d373ca3717dbeb9b171eea53cbf2c9d7471b9b5de1e57f165e60cf58037"}, + {file = "inflate64-0.3.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ac60868745f7bfbcd615329fbdc35997fa36043ce358a1c64d229ef448ebecf0"}, + {file = "inflate64-0.3.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d881b605b7448be451f02c59128dc5fac262dbd0dcff4638e702dc8c7bbb8ef0"}, + {file = "inflate64-0.3.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd04764d0bb830414788cae897d082bf6ad92324e571a5511bd7e1de4a0cdc67"}, + {file = "inflate64-0.3.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1987bbc482aa3e2e7fb72c70b22483cfaed3dbebc5ba6f9ac6f75240794709b"}, + {file = "inflate64-0.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4e7b0a598adaa11366ffbbb7b3d3110db29edd4b732d9336570891363b22b002"}, + {file = "inflate64-0.3.1.tar.gz", hash = "sha256:b52dd8fefd2ba179e5dfa18d6eca7e2fc822584616271c039d5ef1f9ca90c71c"}, +] @@ -3280 +3366 @@ libcommon = [ - {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, + {file = "libcommon-0.5.10-py3-none-any.whl", hash = "sha256:020c37fe46713f2f06c0cc5d6a45ac1e5e16c239311b0b5a89991038873f3c30"}, @@ -3937,2 +4023,2 @@ py7zr = [ - {file = "py7zr-0.17.4-py3-none-any.whl", hash = "sha256:69489b15f6ed1fdee1380092541f02fba193ea8fb5a854bc6ff9cd78cce3440d"}, - {file = "py7zr-0.17.4.tar.gz", hash = "sha256:1df67edaa8dd1613fc5a7de3354322e7bc75d989d6069924ce2d08bb7fabdd19"}, + {file = "py7zr-0.20.2-py3-none-any.whl", hash = "sha256:f6615a5bb07a9252034f23e518affcf4f62725c3632f23b7cfee86aef8bae779"}, + {file = "py7zr-0.20.2.tar.gz", hash = "sha256:791ef912a295b61b91c5fe0c23adeddb80bf13500308062c082b8fec6c8c9653"}, @@ -4660,3 +4745,0 @@ six = [ -sklearn = [ - {file = "sklearn-0.0.post1.tar.gz", hash = "sha256:76b9ed1623775168657b86b5fe966d45752e5c87f528de6240c38923b94147c5"}, -] @@ -4894,0 +4978,4 @@ typer = [ +types-psutil = [ + {file = "types-psutil-5.9.5.5.tar.gz", hash = "sha256:4f26fdb2cb064b274cbc6359fba4abf3b3a2993d7d4abc336ad0947568212c62"}, + {file = "types_psutil-5.9.5.5-py3-none-any.whl", hash = "sha256:e576bb81c74f7443b067e94f92435894d5dd561161bec3d6401727b63df009f0"}, +] diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index cdeca95c..837b68b7 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.10-py3-none-any.whl", develop = false } @@ -26 +26 @@ openpyxl = "^3.0.9" -py7zr = "^0.17.4" +py7zr = "^0.20.1" @@ -30 +30 @@ rarfile = "^4.0" -sklearn = "^0.0" +scikit-learn = "^1.0" @@ -50,0 +51 @@ pip-audit = "^2.4.6" +types-psutil = "^5.9.5.5" @@ -58 +58,0 @@ requires = ["poetry-core>=1.0.0"] -# addopts = "-k 'wip'" diff --git a/workers/datasets_based/src/datasets_based/config.py b/workers/datasets_based/src/datasets_based/config.py index 496df866..96656d20 100644 --- a/workers/datasets_based/src/datasets_based/config.py +++ 
b/workers/datasets_based/src/datasets_based/config.py @@ -3,0 +4,3 @@ +from pathlib import Path +from typing import List, Optional + @@ -7,0 +11 @@ from libcommon.config import ( + AssetsConfig, @@ -17,0 +22,2 @@ class DatasetsBasedConfig: + hf_datasets_cache: Path + max_disk_usage_percent = 90 # hard-coded, not configurable @@ -22,0 +29,7 @@ class DatasetsBasedConfig: + self._hf_datasets_cache = env.str(name="HF_DATASETS_CACHE", default=None) + self.setup() + + def setup(self) -> None: + self.hf_datasets_cache = ( + datasets.config.HF_DATASETS_CACHE if self._hf_datasets_cache is None else Path(self._hf_datasets_cache) + ) @@ -25,0 +39 @@ class FirstRowsConfig: + assets: AssetsConfig @@ -39,0 +54,24 @@ class FirstRowsConfig: + self.assets = AssetsConfig() + + +class ParquetConfig: + blocked_datasets: List[str] + supported_datasets: List[str] + commit_message: str + committer_hf_token: Optional[str] + max_dataset_size: int + source_revision: str + target_revision: str + url_template: str + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("PARQUET_"): + self.blocked_datasets = env.list(name="BLOCKED_DATASETS", default=[]) + self.supported_datasets = env.list(name="SUPPORTED_DATASETS", default=[]) + self.commit_message = env.str(name="COMMIT_MESSAGE", default="Update parquet files") + self.committer_hf_token = env.str(name="COMMITTER_HF_TOKEN", default=None) + self.max_dataset_size = env.int(name="MAX_DATASET_SIZE", default=100_000_000) + self.source_revision = env.str(name="SOURCE_REVISION", default="main") + self.target_revision = env.str(name="TARGET_REVISION", default="refs/convert/parquet") + self.url_template = env.str(name="URL_TEMPLATE", default="/datasets/%s/resolve/%s/%s") @@ -46 +83,0 @@ class AppConfig: - first_rows: FirstRowsConfig @@ -56 +92,0 @@ class AppConfig: - self.first_rows = FirstRowsConfig() diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py index a76b1c4a..18fcb382 100644 --- a/workers/datasets_based/src/datasets_based/worker.py +++ b/workers/datasets_based/src/datasets_based/worker.py @@ -4,2 +3,0 @@ -from typing import Mapping, Type, Union - @@ -7,4 +5 @@ from datasets_based.config import AppConfig -from datasets_based.workers.first_rows import FirstRowsWorker -from datasets_based.workers.splits import SplitsWorker - -DatasetsBasedWorker = Union[SplitsWorker, FirstRowsWorker] +from datasets_based.workers import DatasetsBasedWorker, worker_class_by_endpoint @@ -16,4 +11 @@ def get_worker(app_config: AppConfig) -> DatasetsBasedWorker: - datasets_based_worker_classes: Mapping[str, Type[DatasetsBasedWorker]] = { - "/splits": SplitsWorker, - "/first-rows": FirstRowsWorker, - } + endpoint = app_config.datasets_based.endpoint @@ -21,2 +13 @@ def get_worker(app_config: AppConfig) -> DatasetsBasedWorker: - endpoint = app_config.datasets_based.endpoint - worker = datasets_based_worker_classes[endpoint](app_config=app_config, endpoint=endpoint) + worker = worker_class_by_endpoint[endpoint](app_config=app_config) @@ -25 +16 @@ def get_worker(app_config: AppConfig) -> DatasetsBasedWorker: - f"Unknown worker name '{endpoint}'. Available workers are: {list(datasets_based_worker_classes.keys())}" + f"Unknown worker name '{endpoint}'. 
Available workers are: {list(worker_class_by_endpoint.keys())}" diff --git a/workers/datasets_based/src/datasets_based/workers/__init__.py b/workers/datasets_based/src/datasets_based/workers/__init__.py index e69de29b..71632e41 100644 --- a/workers/datasets_based/src/datasets_based/workers/__init__.py +++ b/workers/datasets_based/src/datasets_based/workers/__init__.py @@ -0,0 +1,15 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from typing import List, Type + +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker +from datasets_based.workers.first_rows import FirstRowsWorker +from datasets_based.workers.parquet import ParquetWorker +from datasets_based.workers.splits import SplitsWorker + +worker_classes: List[Type[DatasetsBasedWorker]] = [FirstRowsWorker, ParquetWorker, SplitsWorker] +worker_class_by_endpoint = {worker_class.get_endpoint(): worker_class for worker_class in worker_classes} + +# explicit re-export +__all__ = ["DatasetsBasedWorker"] diff --git a/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py new file mode 100644 index 00000000..403d3e81 --- /dev/null +++ b/workers/datasets_based/src/datasets_based/workers/_datasets_based_worker.py @@ -0,0 +1,110 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import importlib.metadata +import json +import logging +import re +from abc import ABC, abstractmethod +from datetime import datetime +from hashlib import sha1 +from pathlib import Path +from typing import Optional + +import datasets.config +from libcommon.storage import init_dir, remove_dir +from libcommon.worker import Worker +from psutil import disk_usage + +from datasets_based.config import AppConfig, DatasetsBasedConfig + + +class DatasetsBasedWorker(Worker, ABC): + """Base class for workers that use datasets.""" + + datasets_based_config: DatasetsBasedConfig + + @staticmethod + @abstractmethod + def get_endpoint() -> str: + pass + + # the datasets library cache directories (for data, downloads, extraction, NOT for modules) + # the worker should have only one running job at the same time, then it should + # be safe to use a global variable (and to set the datasets cache globally) + datasets_cache: Optional[Path] = None + + def __init__(self, app_config: AppConfig): + super().__init__( + processing_step=app_config.processing_graph.graph.get_step(self.get_endpoint()), + # ^ raises if the step is not found + common_config=app_config.common, + queue_config=app_config.queue, + worker_config=app_config.worker, + version=importlib.metadata.version(__package__.split(".")[0]), + ) + self.datasets_based_config = app_config.datasets_based + + def has_storage(self) -> bool: + try: + usage = disk_usage(str(self.datasets_based_config.hf_datasets_cache)) + return usage.percent < self.datasets_based_config.max_disk_usage_percent + except Exception: + # if we can't get the disk usage, we let the process continue + return True + + def get_cache_subdirectory( + self, + date: datetime, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + force: bool = False, + ) -> str: + date_str = date.strftime("%Y-%m-%d-%H-%M-%S") + payload = (date_str, self.get_endpoint(), dataset, config, split, force) + hash_suffix = sha1(json.dumps(payload, sort_keys=True).encode(), usedforsecurity=False).hexdigest()[:8] + prefix = f"{date_str}-{self.get_endpoint()}-{dataset}"[:64] + 
subdirectory = f"{prefix}-{hash_suffix}" + return "".join([c if re.match(r"[\w-]", c) else "-" for c in subdirectory]) + + def set_datasets_cache(self, datasets_cache: Path) -> None: + self.datasets_cache = Path(init_dir(datasets_cache)) + datasets.config.HF_DATASETS_CACHE = self.datasets_cache + logging.debug(f"datasets data cache set to: {datasets.config.HF_DATASETS_CACHE}") + datasets.config.DOWNLOADED_DATASETS_PATH = ( + datasets.config.HF_DATASETS_CACHE / datasets.config.DOWNLOADED_DATASETS_DIR + ) + datasets.config.EXTRACTED_DATASETS_PATH = ( + datasets.config.HF_DATASETS_CACHE / datasets.config.EXTRACTED_DATASETS_DIR + ) + + def unset_datasets_cache(self) -> None: + previous_datasets_cache = self.datasets_cache + self.set_datasets_cache(self.datasets_based_config.hf_datasets_cache) + if previous_datasets_cache is not None and self.datasets_cache != previous_datasets_cache: + remove_dir(previous_datasets_cache) + logging.debug(f"temporary datasets data cache deleted: {previous_datasets_cache}") + self.datasets_cache = None + + def set_cache( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> None: + cache_subdirectory = self.get_cache_subdirectory( + date=datetime.now(), dataset=dataset, config=config, split=split, force=force + ) + self.set_datasets_cache(self.datasets_based_config.hf_datasets_cache / cache_subdirectory) + + def unset_cache(self) -> None: + self.unset_datasets_cache() + + def pre_compute( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> None: + self.set_cache(dataset=dataset, config=config, split=split, force=force) + + def post_compute( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> None: + # empty the cache after the job to save storage space + self.unset_cache() diff --git a/workers/datasets_based/src/datasets_based/workers/first_rows.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py index 001bf5ce..d595a32e 100644 --- a/workers/datasets_based/src/datasets_based/workers/first_rows.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -5 +4,0 @@ import functools -import importlib.metadata @@ -24 +23 @@ from libcommon.utils import orjson_dumps -from libcommon.worker import ConfigNotFoundError, SplitNotFoundError, Worker +from libcommon.worker import ConfigNotFoundError, SplitNotFoundError @@ -26 +25 @@ from libcommon.worker import ConfigNotFoundError, SplitNotFoundError, Worker -from datasets_based.config import AppConfig, CacheConfig, FirstRowsConfig +from datasets_based.config import AppConfig, FirstRowsConfig @@ -27,0 +27 @@ from datasets_based.features import get_cell_value +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker @@ -550,2 +550 @@ def compute_first_rows_response( -class FirstRowsWorker(Worker): - cache_config: CacheConfig +class FirstRowsWorker(DatasetsBasedWorker): @@ -554,11 +553,7 @@ class FirstRowsWorker(Worker): - def __init__(self, app_config: AppConfig, endpoint: str): - super().__init__( - processing_step=app_config.processing_graph.graph.get_step(endpoint), - # ^ raises if the step is not found - common_config=app_config.common, - queue_config=app_config.queue, - worker_config=app_config.worker, - version=importlib.metadata.version(__package__.split(".")[0]), - ) - self.cache_config = app_config.cache - self.first_rows_config = app_config.first_rows + @staticmethod + def get_endpoint() -> str: + 
return "/first-rows" + + def __init__(self, app_config: AppConfig): + super().__init__(app_config=app_config) + self.first_rows_config = FirstRowsConfig() @@ -579 +574,2 @@ class FirstRowsWorker(Worker): - assets_base_url=self.common_config.assets_base_url, + assets_base_url=self.first_rows_config.assets.base_url, + assets_directory=self.first_rows_config.assets.storage_directory, @@ -586 +581,0 @@ class FirstRowsWorker(Worker): - assets_directory=self.cache_config.assets_directory, diff --git a/workers/datasets_based/src/datasets_based/workers/parquet.py b/workers/datasets_based/src/datasets_based/workers/parquet.py new file mode 100644 index 00000000..3cd1a49d --- /dev/null +++ b/workers/datasets_based/src/datasets_based/workers/parquet.py @@ -0,0 +1,628 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import contextlib +import glob +import logging +import re +from http import HTTPStatus +from pathlib import Path +from typing import Any, List, Literal, Mapping, Optional, Tuple, TypedDict +from urllib.parse import quote + +import datasets.config +from datasets import ( + get_dataset_config_info, + get_dataset_config_names, + load_dataset_builder, +) +from datasets.data_files import EmptyDatasetError as _EmptyDatasetError +from huggingface_hub.hf_api import ( + CommitOperation, + CommitOperationAdd, + CommitOperationDelete, + DatasetInfo, + HfApi, + RepoFile, +) +from huggingface_hub.utils import RepositoryNotFoundError, RevisionNotFoundError +from libcommon.dataset import ask_access +from libcommon.exceptions import CustomError +from libcommon.worker import DatasetNotFoundError + +from datasets_based.config import AppConfig, ParquetConfig +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker + +ParquetWorkerErrorCode = Literal[ + "DatasetRevisionNotFoundError", + "EmptyDatasetError", + "ConfigNamesError", + "DatasetInBlockListError", + "DatasetTooBigFromHubError", + "DatasetTooBigFromDatasetsError", +] + + +class ParquetWorkerError(CustomError): + """Base class for exceptions in this module.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: ParquetWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__(message, status_code, str(code), cause, disclose_cause) + + +class DatasetRevisionNotFoundError(ParquetWorkerError): + """Raised when the revision of a dataset repository does not exist.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_FOUND, "DatasetRevisionNotFoundError", cause, False) + + +class ConfigNamesError(ParquetWorkerError): + """Raised when the configuration names could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "ConfigNamesError", cause, True) + + +class EmptyDatasetError(ParquetWorkerError): + """Raised when the dataset has no data.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) + + +class DatasetInBlockListError(ParquetWorkerError): + """Raised when the dataset is in the list of blocked datasets.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetInBlockListError", cause, False) + + +class 
DatasetTooBigFromHubError(ParquetWorkerError): + """Raised when the dataset size (sum of files on the Hub) is too big.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetTooBigFromHubError", cause, False) + + +class DatasetTooBigFromDatasetsError(ParquetWorkerError): + """Raised when the dataset size (sum of config sizes given by the datasets library) is too big.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "DatasetTooBigFromDatasetsError", cause, False) + + +class ParquetFileItem(TypedDict): + dataset: str + config: str + split: str + url: str + filename: str + size: int + + +class ParquetResponse(TypedDict): + parquet_files: List[ParquetFileItem] + + +DATASET_TYPE = "dataset" + + +class ParquetFile: + def __init__(self, local_file: str, local_dir: str, config: str): + if not local_file.startswith(local_dir): + raise ValueError(f"{local_file} is not in {local_dir}") + self.local_file = local_file + self.local_dir = local_dir + self.config = config + + def repo_file(self) -> str: + return f'{self.config}/{self.local_file.removeprefix(f"{self.local_dir}/")}' + + +# until https://github.com/huggingface/datasets/pull/5333 is merged +def get_dataset_infos(path: str, revision: Optional[str] = None, use_auth_token: Optional[str] = None): + """Get the meta information about a dataset, returned as a dict mapping config name to DatasetInfoDict. + + Args: + path (``str``): a dataset identifier on the Hugging Face Hub (list all available datasets and ids with + ``datasets.list_datasets()``) e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'`` + revision (Optional ``str``): + If specified, the dataset module will be loaded from the datasets repository at this version. + By default: + - it is set to the local version of the lib. + - it will also try to load it from the main branch if it's not available at the local version of the lib. + Specifying a version that is different from your local version of the lib might cause compatibility issues. + use_auth_token (``str``, optional): Optional string to use as Bearer token for remote files on the Datasets + Hub. 
+ """ + config_names = get_dataset_config_names( + path=path, + revision=revision, + use_auth_token=use_auth_token, + ) + return { + config_name: get_dataset_config_info( + path=path, config_name=config_name, revision=revision, use_auth_token=use_auth_token + ) + for config_name in config_names + } + + +# TODO: use huggingface_hub's hf_hub_url after +# https://github.com/huggingface/huggingface_hub/issues/1082 +def hf_hub_url(repo_id: str, filename: str, hf_endpoint: str, revision: str, url_template: str) -> str: + return (hf_endpoint + url_template) % (repo_id, quote(revision, safe=""), filename) + + +p = re.compile(r"[\w]+-(?P<split>[\w]+?)(-[0-9]{5}-of-[0-9]{5})?.parquet") + + +def parse_repo_filename(filename: str) -> Tuple[str, str]: + parts = filename.split("/") + if len(parts) != 2: + raise ValueError(f"Invalid filename: {filename}") + config, fname = parts + m = p.match(fname) + if not m: + raise ValueError(f"Cannot parse {filename}") + split = m.group("split") + return config, split + + +def create_parquet_file_item( + repo_file: RepoFile, + dataset: str, + hf_endpoint: str, + target_revision: str, + url_template: str, +) -> ParquetFileItem: + if repo_file.size is None: + raise ValueError(f"Cannot get size of {repo_file.rfilename}") + config, split = parse_repo_filename(repo_file.rfilename) + return { + "dataset": dataset, + "config": config, + "split": split, + "url": hf_hub_url( + repo_id=dataset, + filename=repo_file.rfilename, + hf_endpoint=hf_endpoint, + revision=target_revision, + url_template=url_template, + ), + "filename": Path(repo_file.rfilename).name, + "size": repo_file.size, + } + + +def raise_if_blocked( + dataset: str, + blocked_datasets: List[str], +) -> None: + """ + Raise an error if the dataset is in the list of blocked datasets + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + blocked_datasets (`List[str]`): + The list of blocked datasets. If empty, no dataset is blocked. + Returns: + `None` + <Tip> + Raises the following errors: + - [`~parquet.worker.DatasetInBlockListError`] + If the dataset is in the list of blocked datasets. + </Tip> + """ + if dataset in blocked_datasets: + raise DatasetInBlockListError( + "The parquet conversion has been disabled for this dataset for now. Please open an issue in" + " https://github.com/huggingface/datasets-server if you want this dataset to be supported." + ) + + +def get_dataset_info_or_raise( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + revision: str, +) -> DatasetInfo: + """ + Return the dataset info if possible. + Raise an error if the dataset cannot be accessed (does not exist, gated with extra fields, private) + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + revision (`str`): + The git revision (e.g. "main" or sha) of the dataset + Returns: + `DatasetInfo`: The dataset info + <Tip> + Raises the following errors: + - [`~libcommon.worker.DatasetNotFoundError`] + If the repository to download from cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. + - [`~parquet.worker.DatasetRevisionNotFoundError`] + If the revision does not exist or cannot be accessed using the token. 
+ </Tip> + """ + try: + dataset_info = HfApi(endpoint=hf_endpoint, token=hf_token).dataset_info( + repo_id=dataset, revision=revision, files_metadata=True + ) + except RepositoryNotFoundError as err: + raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + except RevisionNotFoundError as err: + raise DatasetRevisionNotFoundError("The dataset revision does not exist on the Hub.") from err + return dataset_info + + +def raise_if_too_big_from_hub( + dataset_info: DatasetInfo, + max_dataset_size: int, +) -> None: + """ + Raise an error if the dataset is too big to be converted to parquet + + Args: + dataset_info (`DatasetInfo`): + The dataset info + max_dataset_size (`int`): + The maximum size of the dataset in bytes + Returns: + `None` + <Tip> + Raises the following errors: + - [`~parquet.worker.DatasetTooBigFromHubError`] + If the dataset is too big to be converted to parquet + </Tip> + """ + dataset_size: int = sum(sibling.size for sibling in dataset_info.siblings if sibling.size is not None) + if dataset_size > max_dataset_size: + raise DatasetTooBigFromHubError( + f"The conversion to parquet is limited to datasets under {max_dataset_size} bytes. " + f"Current size of files on the hub is {dataset_size} bytes." + ) + + +def raise_if_too_big_from_datasets( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + revision: str, + max_dataset_size: int, +) -> None: + """ + Raise an error if the dataset is too big to be converted to parquet, as measured by the sum of the config + sizes given by the datasets library + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + revision (`str`): + The git revision (e.g. "main" or sha) of the dataset + max_dataset_size (`int`): + The maximum size of the dataset in bytes + Returns: + `None` + <Tip> + Raises the following errors: + - [`ValueError`] + If the datasets.config.HF_ENDPOINT is not set to the expected value + - [`~parquet.worker.DatasetTooBigFromDatasetsError`] + If the dataset is too big to be converted to parquet + </Tip> + """ + if datasets.config.HF_ENDPOINT != hf_endpoint: + raise ValueError( + f"datasets.config.HF_ENDPOINT should have already been set to {hf_endpoint}. " + f"Current value: {datasets.config.HF_ENDPOINT}. " + ) + dataset_size = 0 + with contextlib.suppress(Exception): + infos = get_dataset_infos(path=dataset, revision=revision, use_auth_token=hf_token) + dataset_size = sum(value.dataset_size for value in infos.values() if value.dataset_size is not None) + if dataset_size > max_dataset_size: + raise DatasetTooBigFromDatasetsError( + f"The conversion to parquet is limited to datasets under {max_dataset_size} bytes. " + f"Current size as given by the datasets library is {dataset_size} bytes." + )
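A minimal sketch of how the hf_hub_url and parse_repo_filename helpers defined earlier in this file behave, assuming the default PARQUET_URL_TEMPLATE; the repository name is made up:

# parse_repo_filename splits "<config>/<file>.parquet" and extracts the split name
config, split = parse_repo_filename("default/csv-train.parquet")
assert (config, split) == ("default", "train")
# sharded files such as "default/csv-train-00000-of-00042.parquet" yield the same split

# hf_hub_url fills the template with the repo id, the percent-encoded revision and the file path
url = hf_hub_url(
    repo_id="user/dataset",  # made-up dataset
    filename="default/csv-train.parquet",
    hf_endpoint="https://huggingface.co",
    revision="refs/convert/parquet",
    url_template="/datasets/%s/resolve/%s/%s",  # the PARQUET_URL_TEMPLATE default
)
assert url == "https://huggingface.co/datasets/user/dataset/resolve/refs%2Fconvert%2Fparquet/default/csv-train.parquet"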
+ + +def raise_if_not_supported( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + committer_hf_token: Optional[str], + revision: str, + supported_datasets: List[str], + blocked_datasets: List[str], + max_dataset_size: int, +) -> None: + """ + Raise an error if the dataset is not supported: + - if the dataset is in the list of blocked datasets + - if the dataset cannot be accessed (does not exist, gated with extra fields, private) + - if the dataset is too big, and not in the list of supported datasets + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + committer_hf_token (`str`, `optional`): + A user authentication token (See https://huggingface.co/settings/token) with write access. It must: + - be part of the `huggingface` organization (to create the refs/convert/parquet "branch") + - be part of the `datasets-maintainers` organization (to push to the refs/convert/parquet "branch") + revision (`str`): + The git revision (e.g. "main" or sha) of the dataset + supported_datasets (`List[str]`): + The list of supported datasets, excluding the blocked datasets. If empty, all datasets are supported + (excluding the blocked datasets). + blocked_datasets (`List[str]`): + The list of blocked datasets. If empty, no dataset is blocked. + max_dataset_size (`int`): + The maximum size of a dataset in bytes. If the dataset is under the limit (which means that the size + can be fetched), it will be allowed. + Returns: + `None` + <Tip> + Raises the following errors: + - [`~parquet.worker.DatasetInBlockListError`] + If the dataset is in the list of blocked datasets. + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. + - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give sufficient access to the dataset, or if the dataset is private + (private datasets are not supported by the datasets server) + - [`~requests.exceptions.HTTPError`]: any other error when asking access + - [`~parquet.worker.DatasetRevisionNotFoundError`] + If the revision does not exist or cannot be accessed using the token.
+ - [`~parquet.worker.DatasetTooBigFromHubError`] + If the dataset is too big to be converted to parquet + - [`ValueError`] + If the datasets.config.HF_ENDPOINT is not set to the expected value + - [`~parquet.worker.DatasetTooBigFromDatasetsError`] + If the dataset is too big to be converted to parquet + </Tip> + """ + raise_if_blocked(dataset=dataset, blocked_datasets=blocked_datasets) + ask_access(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=committer_hf_token) + dataset_info = get_dataset_info_or_raise( + dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, revision=revision + ) + if dataset in supported_datasets: + return + raise_if_too_big_from_datasets( + dataset=dataset, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + revision=revision, + max_dataset_size=max_dataset_size, + ) + raise_if_too_big_from_hub(dataset_info=dataset_info, max_dataset_size=max_dataset_size) + + +def compute_parquet_response( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str], + committer_hf_token: Optional[str], + source_revision: str, + target_revision: str, + commit_message: str, + url_template: str, + supported_datasets: List[str], + blocked_datasets: List[str], + max_dataset_size: int, +) -> ParquetResponse: + """ + Get the response of /parquet for one specific dataset on huggingface.co. + It is assumed that the dataset can be accessed with the token. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, `optional`): + An app authentication token with read access to all the datasets. + committer_hf_token (`str`, `optional`): + A user authentication token (See https://huggingface.co/settings/token) with write access. It must: + - be part of the `huggingface` organization (to create the refs/convert/parquet "branch") + - be part of the `datasets-maintainers` organization (to push to the refs/convert/parquet "branch") + source_revision (`str`): + The git revision (e.g. "main" or sha) of the dataset used to prepare the parquet files + target_revision (`str`): + The target git revision (e.g. "refs/convert/parquet") of the dataset where to store the parquet files + commit_message (`str`): + The commit message to use when storing the parquet files + url_template (`str`): + The template to use to build the parquet file url + supported_datasets (`List[str]`): + The list of supported datasets, excluding the blocked datasets. If empty, all datasets are supported + (excluding the blocked datasets). + blocked_datasets (`List[str]`): + The list of blocked datasets. If empty, no dataset is blocked. + max_dataset_size (`int`): + The maximum size of a dataset in bytes. If the dataset is under the limit (which means that the size + can be fetched), it will be allowed. + Returns: + `ParquetResponse`: An object with the list of parquet file items (dataset, config, split, url, filename, size). + <Tip> + Raises the following errors: + - [`~parquet.worker.DatasetInBlockListError`] + If the dataset is in the list of blocked datasets. + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled.
+ - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give sufficient access to the dataset, or if the dataset is private + (private datasets are not supported by the datasets server) + - [`~requests.exceptions.HTTPError`]: any other error when asking access + - [`~parquet.worker.DatasetRevisionNotFoundError`] + If the revision does not exist or cannot be accessed using the token. + - [`~parquet.worker.DatasetTooBigFromHubError`] + If the dataset is too big to be converted to parquet + - [`ValueError`] + If the datasets.config.HF_ENDPOINT is not set to the expected value + - [`~parquet.worker.DatasetTooBigFromDatasetsError`] + If the dataset is too big to be converted to parquet + - [`~parquet.worker.EmptyDatasetError`] + The dataset is empty. + - [`~parquet.worker.ConfigNamesError`] + If the list of configurations could not be obtained using the datasets library. + </Tip> + """ + logging.info(f"get parquet files for dataset={dataset}") + + raise_if_not_supported( + dataset=dataset, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + committer_hf_token=committer_hf_token, + revision=source_revision, + supported_datasets=supported_datasets, + blocked_datasets=blocked_datasets, + max_dataset_size=max_dataset_size, + ) + + hf_api = HfApi(endpoint=hf_endpoint, token=hf_token) + committer_hf_api = HfApi(endpoint=hf_endpoint, token=committer_hf_token) + + # create the target revision if it does not exist yet + try: + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) + except RepositoryNotFoundError as err: + raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + except RevisionNotFoundError: + # create the parquet_ref (refs/convert/parquet) + committer_hf_api.create_branch(repo_id=dataset, branch=target_revision, repo_type=DATASET_TYPE) + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=False) + + target_sha = target_dataset_info.sha + previous_files = [f.rfilename for f in target_dataset_info.siblings] + + # get the sorted list of configurations + try: + config_names = sorted( + get_dataset_config_names(path=dataset, revision=source_revision, use_auth_token=hf_token) + ) + except _EmptyDatasetError as err: + raise EmptyDatasetError("The dataset is empty.", cause=err) from err + except Exception as err: + raise ConfigNamesError("Cannot get the configuration names for the dataset.", cause=err) from err + + # prepare the parquet files locally + parquet_files: List[ParquetFile] = [] + for config in config_names: + builder = load_dataset_builder(path=dataset, name=config, revision=source_revision, use_auth_token=hf_token) + builder.download_and_prepare( + file_format="parquet", use_auth_token=hf_token + ) # the parquet files are stored in the cache dir + parquet_files.extend( + ParquetFile(local_file=local_file, local_dir=builder.cache_dir, config=config) + for local_file in glob.glob(f"{builder.cache_dir}**/*.parquet") + ) + + # send the files to the target revision + files_to_add = {parquet_file.repo_file(): parquet_file.local_file for parquet_file in parquet_files} + # don't delete the files we will update + files_to_delete = [file for file in previous_files if file not in files_to_add] + delete_operations: List[CommitOperation] = [CommitOperationDelete(path_in_repo=file) for file in files_to_delete] + add_operations:
List[CommitOperation] = [ + CommitOperationAdd(path_in_repo=file, path_or_fileobj=local_file) + for (file, local_file) in files_to_add.items() + ] + committer_hf_api.create_commit( + repo_id=dataset, + repo_type=DATASET_TYPE, + revision=target_revision, + operations=delete_operations + add_operations, + commit_message=commit_message, + parent_commit=target_sha, + ) + + # call the API again to get the list of parquet files + target_dataset_info = hf_api.dataset_info(repo_id=dataset, revision=target_revision, files_metadata=True) + repo_files = [repo_file for repo_file in target_dataset_info.siblings if repo_file.rfilename.endswith(".parquet")] + # we might want to check if the sha of the parquet files is the same as the one we just uploaded + # we could also check that the list of parquet files is exactly what we expect + # let's not over engineer this for now. After all, what is on the Hub is the source of truth + # and the /parquet response is more a helper to get the list of parquet files + return { + "parquet_files": [ + create_parquet_file_item( + repo_file=repo_file, + dataset=dataset, + hf_endpoint=hf_endpoint, + target_revision=target_revision, + url_template=url_template, + ) + for repo_file in repo_files + ], + } + + +class ParquetWorker(DatasetsBasedWorker): + parquet_config: ParquetConfig + + @staticmethod + def get_endpoint() -> str: + return "/parquet" + + def __init__(self, app_config: AppConfig): + super().__init__(app_config=app_config) + self.parquet_config = ParquetConfig() + + def compute( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + force: bool = False, + ) -> Mapping[str, Any]: + return compute_parquet_response( + dataset=dataset, + hf_endpoint=self.common_config.hf_endpoint, + hf_token=self.common_config.hf_token, + committer_hf_token=self.parquet_config.committer_hf_token, + source_revision=self.parquet_config.source_revision, + target_revision=self.parquet_config.target_revision, + commit_message=self.parquet_config.commit_message, + url_template=self.parquet_config.url_template, + supported_datasets=self.parquet_config.supported_datasets, + blocked_datasets=self.parquet_config.blocked_datasets, + max_dataset_size=self.parquet_config.max_dataset_size, + ) diff --git a/workers/datasets_based/src/datasets_based/workers/splits.py b/workers/datasets_based/src/datasets_based/workers/splits.py index 13f1022f..31e1c268 100644 --- a/workers/datasets_based/src/datasets_based/workers/splits.py +++ b/workers/datasets_based/src/datasets_based/workers/splits.py @@ -4 +3,0 @@ -import importlib.metadata @@ -18 +17 @@ from libcommon.simple_cache import delete_response, get_dataset_response_ids -from libcommon.worker import Queue, Worker +from libcommon.worker import Queue @@ -20 +19 @@ from libcommon.worker import Queue, Worker -from datasets_based.config import AppConfig +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker @@ -165,10 +164,4 @@ def compute_splits_response( -class SplitsWorker(Worker): - def __init__(self, app_config: AppConfig, endpoint: str): - super().__init__( - processing_step=app_config.processing_graph.graph.get_step(endpoint), - # ^ raises if the step is not found - common_config=app_config.common, - queue_config=app_config.queue, - worker_config=app_config.worker, - version=importlib.metadata.version(__package__.split(".")[0]), - ) +class SplitsWorker(DatasetsBasedWorker): + @staticmethod + def get_endpoint() -> str: + return "/splits" diff --git a/workers/datasets_based/tests/conftest.py 
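A minimal sketch of the delete-then-add commit assembled above, under made-up file names (a temporary file stands in for a prepared parquet file, since CommitOperationAdd checks that the local path exists):

import tempfile
from typing import List

from huggingface_hub.hf_api import CommitOperation, CommitOperationAdd, CommitOperationDelete

# files present on the target revision before the commit (illustrative)
previous_files = ["default/csv-train.parquet", "old_config/csv-train.parquet"]
# repo path -> freshly prepared local parquet file
local_parquet = tempfile.NamedTemporaryFile(suffix=".parquet", delete=False)
files_to_add = {"default/csv-train.parquet": local_parquet.name}

# anything from the previous revision that is not re-uploaded gets deleted
files_to_delete = [file for file in previous_files if file not in files_to_add]
delete_operations: List[CommitOperation] = [CommitOperationDelete(path_in_repo=file) for file in files_to_delete]
add_operations: List[CommitOperation] = [
    CommitOperationAdd(path_in_repo=repo_path, path_or_fileobj=local_path)
    for repo_path, local_path in files_to_add.items()
]
# a single create_commit call then applies deletions and additions atomically, e.g.:
# committer_hf_api.create_commit(..., operations=delete_operations + add_operations, parent_commit=target_sha)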
b/workers/datasets_based/tests/conftest.py index 08f13bd4..f6b56567 100644 --- a/workers/datasets_based/tests/conftest.py +++ b/workers/datasets_based/tests/conftest.py @@ -3,0 +4,5 @@ +from pathlib import Path +from typing import Iterator + +from libcommon.queue import _clean_queue_database +from libcommon.simple_cache import _clean_cache_database @@ -6 +11 @@ from pytest import MonkeyPatch, fixture -from datasets_based.config import AppConfig +from datasets_based.config import AppConfig, FirstRowsConfig, ParquetConfig @@ -8,2 +13 @@ from datasets_based.config import AppConfig -# Import fixture modules as plugins -pytest_plugins = ["tests.fixtures.datasets", "tests.fixtures.files", "tests.fixtures.hub"] +from .constants import CI_APP_TOKEN, CI_HUB_ENDPOINT, CI_URL_TEMPLATE, CI_USER_TOKEN @@ -12,12 +16,4 @@ pytest_plugins = ["tests.fixtures.datasets", "tests.fixtures.files", "tests.fixt -# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 -@fixture(scope="session") -def monkeypatch_session(hf_endpoint: str, hf_token: str): - monkeypatch_session = MonkeyPatch() - monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") - monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") - monkeypatch_session.setenv("COMMON_HF_ENDPOINT", hf_endpoint) - monkeypatch_session.setenv("COMMON_HF_TOKEN", hf_token) - monkeypatch_session.setenv("COMMON_ASSETS_BASE_URL", "http://localhost/assets") - monkeypatch_session.setenv("FIRST_ROWS_MAX_NUMBER", "7") - yield monkeypatch_session - monkeypatch_session.undo() +@fixture +def datasets_cache_directory(tmp_path: Path) -> Path: + return tmp_path / "datasets" + @@ -24,0 +21,3 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): +@fixture +def modules_cache_directory(tmp_path: Path) -> Path: + return tmp_path / "modules" @@ -25,0 +25,2 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 @@ -27 +28,30 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): -def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: +def monkeypatch_session() -> Iterator[MonkeyPatch]: + mp = MonkeyPatch() + mp.setattr("huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE", CI_URL_TEMPLATE) + # ^ see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191056 + mp.setattr("datasets.config.HF_ENDPOINT", CI_HUB_ENDPOINT) + mp.setattr("datasets.config.HF_UPDATE_DOWNLOAD_COUNTS", False) + yield mp + mp.undo() + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture +def set_env_vars(datasets_cache_directory: Path, modules_cache_directory: Path) -> Iterator[MonkeyPatch]: + mp = MonkeyPatch() + mp.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + mp.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + mp.setenv("COMMON_HF_ENDPOINT", CI_HUB_ENDPOINT) + mp.setenv("COMMON_HF_TOKEN", CI_APP_TOKEN) + mp.setenv("ASSETS_BASE_URL", "http://localhost/assets") + mp.setenv("FIRST_ROWS_MAX_NUMBER", "7") + mp.setenv("PARQUET_MAX_DATASET_SIZE", "10_000") + mp.setenv("PARQUET_COMMITTER_HF_TOKEN", CI_USER_TOKEN) + mp.setenv("DATASETS_BASED_HF_DATASETS_CACHE", str(datasets_cache_directory)) + mp.setenv("HF_MODULES_CACHE", str(modules_cache_directory)) + yield mp + mp.undo() + + +@fixture +def app_config(set_env_vars: MonkeyPatch) -> Iterator[AppConfig]: @@ -31 +61,20 @@ def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: - return app_config + 
yield app_config + # Clean the database after each test. Must be done in test databases only, ensured by the check above! + # TODO: use a parameter to pass a reference to the database, instead of relying on the implicit global variable + # managed by mongoengine + _clean_cache_database() + _clean_queue_database() + + +@fixture +def first_rows_config() -> FirstRowsConfig: + return FirstRowsConfig() + + +@fixture +def parquet_config() -> ParquetConfig: + return ParquetConfig() + + +# Import fixture modules as plugins +pytest_plugins = ["tests.fixtures.datasets", "tests.fixtures.files", "tests.fixtures.hub"] diff --git a/workers/datasets_based/tests/constants.py b/workers/datasets_based/tests/constants.py new file mode 100644 index 00000000..d30c6ca6 --- /dev/null +++ b/workers/datasets_based/tests/constants.py @@ -0,0 +1,9 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +# see https://github.com/huggingface/moon-landing/blob/main/server/scripts/staging-seed-db.ts +CI_APP_TOKEN = "hf_datasets-server_token" +CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co" +CI_URL_TEMPLATE = CI_HUB_ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}" +CI_USER = "__DUMMY_DATASETS_SERVER_USER__" +CI_USER_TOKEN = "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" diff --git a/workers/datasets_based/tests/fixtures/files.py b/workers/datasets_based/tests/fixtures/files.py index db2037c4..edab9e8f 100644 --- a/workers/datasets_based/tests/fixtures/files.py +++ b/workers/datasets_based/tests/fixtures/files.py @@ -6,0 +7 @@ import json +import pandas as pd @@ -27,0 +29,7 @@ def csv_path(tmp_path_factory: pytest.TempPathFactory) -> str: [email protected](scope="session") +def data_df(csv_path: str) -> pd.DataFrame: + # from the CSV file, not the DATA variable, because the CSV file does not respect the first column type + # we have to follow the same behavior + return pd.read_csv(csv_path) + + @@ -41,0 +50,17 @@ def jsonl_path(tmp_path_factory: pytest.TempPathFactory) -> str: + + [email protected](scope="session") +def extra_fields_readme(tmp_path_factory: pytest.TempPathFactory) -> str: + path = str(tmp_path_factory.mktemp("data") / "README.md") + lines = [ + "---", + 'extra_gated_prompt: "You agree not to attempt to determine the identity of individuals in this dataset"', + "extra_gated_fields:", + " Company: text", + " Country: text", + " I agree to use this model for non-commercial use ONLY: checkbox", + "---", + ] + with open(path, "w", newline="") as f: + f.writelines(f"{line}\n" for line in lines) + return path diff --git a/workers/datasets_based/tests/fixtures/hub.py b/workers/datasets_based/tests/fixtures/hub.py index 31489c83..14e598c5 100644 --- a/workers/datasets_based/tests/fixtures/hub.py +++ b/workers/datasets_based/tests/fixtures/hub.py @@ -7 +7 @@ import time -from contextlib import contextmanager, suppress +from contextlib import suppress @@ -11 +10,0 @@ from typing import Any, Iterable, List, Mapping, Optional, Tuple, TypedDict -import datasets.config @@ -21,0 +21,5 @@ from huggingface_hub.hf_api import ( +from ..constants import CI_HUB_ENDPOINT, CI_URL_TEMPLATE, CI_USER, CI_USER_TOKEN + +DATASET = "dataset" +hf_api = HfApi(endpoint=CI_HUB_ENDPOINT) + @@ -29,21 +32,0 @@ def get_default_config_split(dataset: str) -> Tuple[str, str, str]: -# see https://github.com/huggingface/moon-landing/blob/main/server/scripts/staging-seed-db.ts -CI_HUB_USER = "__DUMMY_DATASETS_SERVER_USER__" -CI_HUB_USER_API_TOKEN = "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" - -CI_HUB_ENDPOINT =
"https://hub-ci.huggingface.co" -CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE = CI_HUB_ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}" - - [email protected](autouse=True) -def ci_hfh_hf_hub_url(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr( - "huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE", CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE - ) - - -# Ensure the datasets library uses the expected HuggingFace endpoint -datasets.config.HF_ENDPOINT = CI_HUB_ENDPOINT -# Don't increase the datasets download counts on huggingface.co -datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False - - @@ -51,2 +33,0 @@ def update_repo_settings( - hf_api: HfApi, - repo_id: str, @@ -53,0 +35 @@ def update_repo_settings( + repo_id: str, @@ -119,40 +100,0 @@ def update_repo_settings( [email protected](scope="session") -def hf_api(): - return HfApi(endpoint=CI_HUB_ENDPOINT) - - [email protected](scope="session") -def hf_token() -> str: - return CI_HUB_USER_API_TOKEN - - [email protected](scope="session") -def hf_endpoint() -> str: - return CI_HUB_ENDPOINT - - [email protected] -def cleanup_repo(hf_api: HfApi): - def _cleanup_repo(repo_id): - hf_api.delete_repo(repo_id=repo_id, token=CI_HUB_USER_API_TOKEN, repo_type="dataset") - - return _cleanup_repo - - [email protected] -def temporary_repo(cleanup_repo): - @contextmanager - def _temporary_repo(repo_id): - try: - yield repo_id - finally: - cleanup_repo(repo_id) - - return _temporary_repo - - -def create_unique_repo_name(prefix: str, user: str) -> str: - repo_name = f"{prefix}-{int(time.time() * 10e3)}" - return f"{user}/{repo_name}" - - @@ -160,9 +102 @@ def create_hub_dataset_repo( - *, - hf_api: HfApi, - hf_token: str, - prefix: str, - file_paths: List[str] = None, - dataset: Dataset = None, - private=False, - gated=False, - user=CI_HUB_USER, + *, prefix: str, file_paths: List[str] = None, dataset: Dataset = None, private=False, gated=False @@ -170 +104 @@ def create_hub_dataset_repo( - repo_id = create_unique_repo_name(prefix, user) + repo_id = f"{CI_USER}/{prefix}-{int(time.time() * 10e3)}" @@ -172 +106 @@ def create_hub_dataset_repo( - dataset.push_to_hub(repo_id=repo_id, private=private, token=hf_token, embed_external_files=True) + dataset.push_to_hub(repo_id=repo_id, private=private, token=CI_USER_TOKEN, embed_external_files=True) @@ -174 +108 @@ def create_hub_dataset_repo( - hf_api.create_repo(repo_id=repo_id, token=hf_token, repo_type="dataset", private=private) + hf_api.create_repo(repo_id=repo_id, token=CI_USER_TOKEN, repo_type=DATASET, private=private) @@ -176 +110 @@ def create_hub_dataset_repo( - update_repo_settings(hf_api, repo_id, token=hf_token, gated=gated, repo_type="dataset") + update_repo_settings(repo_id=repo_id, token=CI_USER_TOKEN, gated=gated, repo_type=DATASET) @@ -180 +114 @@ def create_hub_dataset_repo( - token=hf_token, + token=CI_USER_TOKEN, @@ -184 +118 @@ def create_hub_dataset_repo( - repo_type="dataset", + repo_type=DATASET, @@ -188,0 +123,7 @@ def create_hub_dataset_repo( +def delete_hub_dataset_repo(repo_id: str) -> None: + with suppress(requests.exceptions.HTTPError, ValueError): + hf_api.delete_repo(repo_id=repo_id, token=CI_USER_TOKEN, repo_type=DATASET) + + +# TODO: factor all the datasets fixture with one function that manages the yield and deletion + @@ -190,3 +131,3 @@ def create_hub_dataset_repo( [email protected](scope="session", autouse=True) -def hub_public_empty(hf_api: HfApi, hf_token: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="empty") [email 
+def hub_public_empty() -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="empty") @@ -194,2 +135 @@ def hub_public_empty(hf_api: HfApi, hf_token: str) -> Iterable[str]: - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) @@ -198,3 +138,3 @@ [email protected](scope="session", autouse=True) -def hub_public_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="csv", file_paths=[csv_path]) [email protected](scope="session") +def hub_public_csv(csv_path: str) -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="csv", file_paths=[csv_path]) @@ -202,2 +142 @@ def hub_public_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str] - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) @@ -206,5 +145,3 @@ def hub_public_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str] [email protected](scope="session", autouse=True) -def hub_private_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo( - hf_api=hf_api, hf_token=hf_token, prefix="csv_private", file_paths=[csv_path], private=True - ) [email protected](scope="session") +def hub_private_csv(csv_path: str) -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="csv_private", file_paths=[csv_path], private=True) @@ -212,2 +149 @@ def hub_private_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) @@ -216,5 +152,3 @@ def hub_private_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str [email protected](scope="session", autouse=True) -def hub_gated_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo( - hf_api=hf_api, hf_token=hf_token, prefix="csv_gated", file_paths=[csv_path], gated=True - ) [email protected](scope="session") +def hub_gated_csv(csv_path: str) -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="csv_gated", file_paths=[csv_path], gated=True) @@ -222,2 +156 @@ def hub_gated_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) @@ -226,3 +159,3 @@ def hub_gated_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: [email protected](scope="session", autouse=True) -def hub_public_jsonl(hf_api: HfApi, hf_token: str, jsonl_path: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="jsonl", file_paths=[jsonl_path]) [email protected](scope="session") +def hub_public_jsonl(jsonl_path: str) -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="jsonl", file_paths=[jsonl_path]) @@ -230,2 +163 @@ def hub_public_jsonl(hf_api: HfApi, hf_token: str, jsonl_path: str) -> Iterable[ - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id)
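One possible factoring of the create/yield/delete pattern flagged in the TODO above, assuming the create_hub_dataset_repo and delete_hub_dataset_repo helpers from this file (the tmp_hub_dataset_repo name is hypothetical):

from contextlib import contextmanager
from typing import Iterator

@contextmanager
def tmp_hub_dataset_repo(**kwargs) -> Iterator[str]:
    # create the repo on the CI Hub, hand it to the test session, delete it afterwards
    repo_id = create_hub_dataset_repo(**kwargs)
    try:
        yield repo_id
    finally:
        delete_hub_dataset_repo(repo_id=repo_id)

# each fixture would then reduce to:
# @pytest.fixture(scope="session")
# def hub_public_csv(csv_path: str) -> Iterable[str]:
#     with tmp_hub_dataset_repo(prefix="csv", file_paths=[csv_path]) as repo_id:
#         yield repo_id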
@@ -234,3 +166,5 @@ def hub_public_jsonl(hf_api: HfApi, hf_token: str, jsonl_path: str) -> Iterable[ [email protected](scope="session", autouse=True) -def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="audio", dataset=datasets["audio"]) [email protected](scope="session") +def hub_gated_extra_fields_csv(csv_path: str, extra_fields_readme: str) -> Iterable[str]: + repo_id = create_hub_dataset_repo( + prefix="csv_extra_fields_gated", file_paths=[csv_path, extra_fields_readme], gated=True + ) @@ -238,2 +172 @@ def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Datase - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) @@ -242,3 +175,3 @@ def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Datase [email protected](scope="session", autouse=True) -def hub_public_image(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="image", dataset=datasets["image"]) [email protected](scope="session") +def hub_public_audio(datasets: Mapping[str, Dataset]) -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="audio", dataset=datasets["audio"]) @@ -246,2 +179 @@ def hub_public_image(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Datase - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) @@ -250,5 +182,3 @@ def hub_public_image(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Datase [email protected](scope="session", autouse=True) -def hub_public_images_list(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: - repo_id = create_hub_dataset_repo( - hf_api=hf_api, hf_token=hf_token, prefix="images_list", dataset=datasets["images_list"] - ) [email protected](scope="session") +def hub_public_image(datasets: Mapping[str, Dataset]) -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="image", dataset=datasets["image"]) @@ -256,2 +186 @@ def hub_public_images_list(hf_api: HfApi, hf_token: str, datasets: Mapping[str, - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) @@ -260,3 +189,3 @@ def hub_public_images_list(hf_api: HfApi, hf_token: str, datasets: Mapping[str, [email protected](scope="session", autouse=True) -def hub_public_big(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="big", dataset=datasets["big"]) @@ -264,2 +193,8 @@ def hub_public_big(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset] - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + delete_hub_dataset_repo(repo_id=repo_id) + + [email protected](scope="session") +def hub_public_big(datasets: Mapping[str,
Dataset]) -> Iterable[str]: + repo_id = create_hub_dataset_repo(prefix="big", dataset=datasets["big"]) + yield repo_id + delete_hub_dataset_repo(repo_id=repo_id) @@ -271,0 +207 @@ class HubDatasetTest(TypedDict): + parquet_response: Any @@ -316,0 +253,21 @@ def create_first_rows_response(dataset: str, cols: Mapping[str, Any], rows: List +def create_parquet_response(dataset: str, filename: str, size: int): + dataset, config, split = get_default_config_split(dataset) + return { + "parquet_files": [ + { + "dataset": dataset, + "config": config, + "split": split, + "url": CI_URL_TEMPLATE.format( + repo_id=f"datasets/{dataset}", revision="refs%2Fconvert%2Fparquet", filename=f"{config}/{filename}" + ), + "filename": filename, + "size": size, + } + ], + } + + +CSV_PARQUET_SIZE = 1_865 +AUDIO_PARQUET_SIZE = 1_383 + @@ -418 +375 @@ BIG_rows = ["a" * 1_234 for _ in range(4_567)] [email protected](scope="session", autouse=True) [email protected](scope="session") @@ -424,0 +382 @@ def hub_datasets( + hub_gated_extra_fields_csv, @@ -434,0 +393 @@ def hub_datasets( + "parquet_response": None, @@ -439,0 +399 @@ def hub_datasets( + "parquet_response": None, @@ -444,0 +405,3 @@ def hub_datasets( + "parquet_response": create_parquet_response( + dataset=hub_public_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + ), @@ -449,0 +413,3 @@ def hub_datasets( + "parquet_response": create_parquet_response( + dataset=hub_private_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + ), @@ -454,0 +421,3 @@ def hub_datasets( + "parquet_response": create_parquet_response( + dataset=hub_gated_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + ), @@ -459,0 +429,9 @@ def hub_datasets( + "parquet_response": None, + }, + "gated_extra_fields": { + "name": hub_gated_extra_fields_csv, + "splits_response": create_splits_response(hub_gated_extra_fields_csv, None, None), + "first_rows_response": create_first_rows_response(hub_gated_extra_fields_csv, DATA_cols, DATA_rows), + "parquet_response": create_parquet_response( + dataset=hub_gated_extra_fields_csv, filename="csv-train.parquet", size=CSV_PARQUET_SIZE + ), @@ -466,0 +445,3 @@ def hub_datasets( + "parquet_response": create_parquet_response( + dataset=hub_public_audio, filename="parquet-train.parquet", size=AUDIO_PARQUET_SIZE + ), @@ -473,0 +455 @@ def hub_datasets( + "parquet_response": None, @@ -480,0 +463 @@ def hub_datasets( + "parquet_response": None, @@ -485,0 +469 @@ def hub_datasets( + "parquet_response": None, diff --git a/workers/datasets_based/tests/test_features.py b/workers/datasets_based/tests/test_features.py index f41ca73f..745b216a 100644 --- a/workers/datasets_based/tests/test_features.py +++ b/workers/datasets_based/tests/test_features.py @@ -12 +12 @@ from datasets import Audio, Dataset, Image, Value -from datasets_based.config import AppConfig +from datasets_based.config import AppConfig, FirstRowsConfig @@ -59,0 +60 @@ def test_value( + first_rows_config: FirstRowsConfig, @@ -73,2 +74,2 @@ def test_value( - assets_base_url=app_config.common.assets_base_url, - assets_directory=app_config.cache.assets_directory, + assets_base_url=first_rows_config.assets.base_url, + assets_directory=first_rows_config.assets.storage_directory, @@ -300,0 +302 @@ def test_others( + first_rows_config: FirstRowsConfig, @@ -316,2 +318,2 @@ def test_others( - assets_base_url=app_config.common.assets_base_url, +
assets_directory=first_rows_config.assets.storage_directory, diff --git a/workers/datasets_based/tests/workers/__init__.py b/workers/datasets_based/tests/workers/__init__.py index e69de29b..1e9d0c5a 100644 --- a/workers/datasets_based/tests/workers/__init__.py +++ b/workers/datasets_based/tests/workers/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. diff --git a/workers/datasets_based/tests/workers/test__datasets_based_worker.py b/workers/datasets_based/tests/workers/test__datasets_based_worker.py new file mode 100644 index 00000000..5cec7039 --- /dev/null +++ b/workers/datasets_based/tests/workers/test__datasets_based_worker.py @@ -0,0 +1,115 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import os +from datetime import datetime +from pathlib import Path +from typing import Any, Mapping, Optional + +import datasets.config +import pytest + +from datasets_based.config import AppConfig +from datasets_based.workers._datasets_based_worker import DatasetsBasedWorker + + +class DummyWorker(DatasetsBasedWorker): + @staticmethod + def get_endpoint() -> str: + return "/splits" + # ^ borrowing the endpoint, so that the processing step exists and the worker can be initialized + # refactoring libcommon.processing_graph might help avoid this + + def compute( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> Mapping[str, Any]: + if config == "raise": + raise ValueError("This is a test") + else: + return {} + + [email protected] +def worker(app_config: AppConfig) -> DummyWorker: + return DummyWorker(app_config=app_config) + + +def test_version(worker: DummyWorker) -> None: + assert len(worker.version.split(".")) == 3 + assert worker.compare_major_version(other_version="0.0.0") > 0 + assert worker.compare_major_version(other_version="1000.0.0") < 0 + + +def test_has_storage(worker: DummyWorker) -> None: + assert worker.has_storage() is True + worker.datasets_based_config.max_disk_usage_percent = 0 + # the directory does not exist yet, so it should return True + assert worker.has_storage() is True + os.makedirs(worker.datasets_based_config.hf_datasets_cache, exist_ok=True) + assert worker.has_storage() is False + + [email protected]( + "dataset,config,split,force,expected", + [ + ("user/dataset", "config", "split", True, "2022-11-07-12-34-56--splits-user-dataset-775e7212"), + # Every parameter variation changes the hash, hence the subdirectory + ("user/dataset", None, "split", True, "2022-11-07-12-34-56--splits-user-dataset-73c4b810"), + ("user/dataset", "config2", "split", True, "2022-11-07-12-34-56--splits-user-dataset-b6920bfb"), + ("user/dataset", "config", None, True, "2022-11-07-12-34-56--splits-user-dataset-36d21623"), + ("user/dataset", "config", "split2", True, "2022-11-07-12-34-56--splits-user-dataset-f60adde1"), + ("user/dataset", "config", "split", False, "2022-11-07-12-34-56--splits-user-dataset-f7985698"), + # The subdirectory length is truncated, and it always finishes with the hash + ( + "very_long_dataset_name_0123456789012345678901234567890123456789012345678901234567890123456789", + "config", + "split", + True, + "2022-11-07-12-34-56--splits-very_long_dataset_name_0123456789012-1457d125", + ), + ], +) +def test_get_cache_subdirectory( + worker: DummyWorker, dataset: str, config: Optional[str], split: Optional[str], force: bool, expected: str +) -> None: + date = datetime(2022, 11, 7, 12, 34, 56) + subdirectory = worker.get_cache_subdirectory(date=date, dataset=dataset, config=config, split=split, force=force) + assert subdirectory == expected
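A minimal sketch of where the expected values in the parametrized cases above come from, mirroring get_cache_subdirectory in _datasets_based_worker.py (same payload layout and truncation; the usedforsecurity flag does not change the digest):

import json
import re
from datetime import datetime
from hashlib import sha1

def expected_subdirectory(date: datetime, endpoint: str, dataset: str, config=None, split=None, force=False) -> str:
    date_str = date.strftime("%Y-%m-%d-%H-%M-%S")
    payload = (date_str, endpoint, dataset, config, split, force)
    hash_suffix = sha1(json.dumps(payload, sort_keys=True).encode()).hexdigest()[:8]
    prefix = f"{date_str}-{endpoint}-{dataset}"[:64]
    # characters outside [a-zA-Z0-9_-] (such as "/") become "-"
    return "".join(c if re.match(r"[\w-]", c) else "-" for c in f"{prefix}-{hash_suffix}")

# expected_subdirectory(datetime(2022, 11, 7, 12, 34, 56), "/splits", "user/dataset", "config", "split", True)
# -> "2022-11-07-12-34-56--splits-user-dataset-775e7212" (the first case above)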
worker.get_cache_subdirectory(date=date, dataset=dataset, config=config, split=split, force=force) + assert subdirectory == expected + + +def test_set_and_unset_datasets_cache(worker: DummyWorker) -> None: + base_path = worker.datasets_based_config.hf_datasets_cache + dummy_path = base_path / "dummy" + worker.set_datasets_cache(dummy_path) + assert_datasets_cache_path(path=dummy_path, exists=True) + worker.unset_datasets_cache() + assert_datasets_cache_path(path=base_path, exists=True) + + +def test_set_and_unset_cache(worker: DummyWorker) -> None: + datasets_base_path = worker.datasets_based_config.hf_datasets_cache + worker.set_cache(dataset="user/dataset", config="config", split="split", force=True) + assert str(datasets.config.HF_DATASETS_CACHE).startswith(str(datasets_base_path)) + assert "-splits-user-dataset" in str(datasets.config.HF_DATASETS_CACHE) + worker.unset_cache() + assert_datasets_cache_path(path=datasets_base_path, exists=True) + + [email protected]("config", ["raise", "dont_raise"]) +def test_process(worker: DummyWorker, hub_public_csv: str, config: str) -> None: + # ^ this test requires an existing dataset, otherwise .process fails before setting the cache + # it must work in both cases: when the job fails and when it succeeds + datasets_base_path = worker.datasets_based_config.hf_datasets_cache + # the datasets library sets the cache to its own default + assert_datasets_cache_path(path=datasets_base_path, exists=False, equals=False) + result = worker.process(dataset=hub_public_csv, config=config, force=True) + assert result is (config != "raise") + # the configured cache is now set (after having deleted a subdirectory used for the job) + assert_datasets_cache_path(path=datasets_base_path, exists=True) + + +def assert_datasets_cache_path(path: Path, exists: bool, equals: bool = True) -> None: + assert path.exists() is exists + assert (datasets.config.HF_DATASETS_CACHE == path) is equals + assert (datasets.config.DOWNLOADED_DATASETS_PATH == path / datasets.config.DOWNLOADED_DATASETS_DIR) is equals + assert (datasets.config.EXTRACTED_DATASETS_PATH == path / datasets.config.EXTRACTED_DATASETS_DIR) is equals diff --git a/workers/datasets_based/tests/workers/test_first_rows.py index f12f7c79..ddd059d8 100644 --- a/workers/datasets_based/tests/workers/test_first_rows.py +++ b/workers/datasets_based/tests/workers/test_first_rows.py @@ -4 +3,0 @@ - @@ -10,2 +9 @@ from libcommon.exceptions import CustomError -from libcommon.queue import _clean_queue_database -from libcommon.simple_cache import DoesNotExist, _clean_cache_database, get_response +from libcommon.simple_cache import DoesNotExist, get_response @@ -13 +11 @@ from libcommon.simple_cache import DoesNotExist, _clean_cache_database, get_resp -from datasets_based.config import AppConfig +from datasets_based.config import AppConfig, FirstRowsConfig @@ -23,7 +21 @@ from ..fixtures.hub import HubDatasets, get_default_config_split [email protected](autouse=True) -def clean_mongo_database() -> None: - _clean_cache_database() - _clean_queue_database() - - [email protected](autouse=True, scope="module") [email protected] @@ -31 +23 @@ def worker(app_config: AppConfig) -> FirstRowsWorker: - return FirstRowsWorker(app_config=app_config, endpoint="/first-rows") + return FirstRowsWorker(app_config=app_config) @@ -99,0 +92 @@ def test_number_rows( + first_rows_config: FirstRowsConfig, @@ -115 +108,2 @@ def test_number_rows( - assets_base_url=app_config.common.assets_base_url, + 
assets_base_url=first_rows_config.assets.base_url, + assets_directory=first_rows_config.assets.storage_directory, @@ -117,6 +111,5 @@ def test_number_rows( - max_size_fallback=app_config.first_rows.fallback_max_dataset_size, - rows_max_number=app_config.first_rows.max_number, - rows_min_number=app_config.first_rows.min_number, - rows_max_bytes=app_config.first_rows.max_bytes, - min_cell_bytes=app_config.first_rows.min_cell_bytes, - assets_directory=app_config.cache.assets_directory, + max_size_fallback=first_rows_config.fallback_max_dataset_size, + rows_max_number=first_rows_config.max_number, + rows_min_number=first_rows_config.min_number, + rows_max_bytes=first_rows_config.max_bytes, + min_cell_bytes=first_rows_config.min_cell_bytes, @@ -131 +124,2 @@ def test_number_rows( - assets_base_url=app_config.common.assets_base_url, + assets_base_url=first_rows_config.assets.base_url, + assets_directory=first_rows_config.assets.storage_directory, @@ -133,6 +127,5 @@ def test_number_rows( - max_size_fallback=app_config.first_rows.fallback_max_dataset_size, - rows_max_number=app_config.first_rows.max_number, - rows_min_number=app_config.first_rows.min_number, - rows_max_bytes=app_config.first_rows.max_bytes, - min_cell_bytes=app_config.first_rows.min_cell_bytes, - assets_directory=app_config.cache.assets_directory, + max_size_fallback=first_rows_config.fallback_max_dataset_size, + rows_max_number=first_rows_config.max_number, + rows_min_number=first_rows_config.min_number, + rows_max_bytes=first_rows_config.max_bytes, + min_cell_bytes=first_rows_config.min_cell_bytes, @@ -170,0 +164 @@ def test_truncation( + first_rows_config: FirstRowsConfig, @@ -180 +174,2 @@ def test_truncation( - assets_base_url=app_config.common.assets_base_url, + assets_base_url=first_rows_config.assets.base_url, + assets_directory=first_rows_config.assets.storage_directory, @@ -182 +177 @@ def test_truncation( - max_size_fallback=app_config.first_rows.fallback_max_dataset_size, + max_size_fallback=first_rows_config.fallback_max_dataset_size, @@ -187 +181,0 @@ def test_truncation( - assets_directory=app_config.cache.assets_directory, diff --git a/workers/datasets_based/tests/workers/test_parquet.py b/workers/datasets_based/tests/workers/test_parquet.py new file mode 100644 index 00000000..450ad376 --- /dev/null +++ b/workers/datasets_based/tests/workers/test_parquet.py @@ -0,0 +1,320 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import io +from http import HTTPStatus +from typing import Iterator, List + +import pandas as pd +import pytest +import requests +from libcommon.exceptions import CustomError +from libcommon.simple_cache import DoesNotExist, get_response + +from datasets_based.config import AppConfig, ParquetConfig +from datasets_based.workers.parquet import ( + DatasetInBlockListError, + DatasetTooBigFromDatasetsError, + DatasetTooBigFromHubError, + ParquetWorker, + compute_parquet_response, + get_dataset_info_or_raise, + parse_repo_filename, + raise_if_blocked, + raise_if_not_supported, + raise_if_too_big_from_datasets, + raise_if_too_big_from_hub, +) + +from ..fixtures.hub import HubDatasets + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 [email protected](scope="module", autouse=True) +def set_supported_datasets(hub_datasets: HubDatasets) -> Iterator[pytest.MonkeyPatch]: + mp = pytest.MonkeyPatch() + mp.setenv( + "PARQUET_BLOCKED_DATASETS", + ",".join(value["name"] for value in hub_datasets.values() if "jsonl" in value["name"]), + ) + mp.setenv( + "PARQUET_SUPPORTED_DATASETS", + ",".join(value["name"] for value in hub_datasets.values() if "big" not in value["name"]), + ) + yield mp + mp.undo() + + [email protected] +def worker(app_config: AppConfig) -> ParquetWorker: + return ParquetWorker(app_config=app_config) + + +def test_compute(worker: ParquetWorker, hub_datasets: HubDatasets) -> None: + dataset = hub_datasets["public"]["name"] + assert worker.process(dataset=dataset) is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + assert cached_response["worker_version"] == worker.version + assert cached_response["dataset_git_revision"] is not None + content = cached_response["content"] + assert len(content["parquet_files"]) == 1 + assert content == hub_datasets["public"]["parquet_response"] + + +def test_doesnotexist(worker: ParquetWorker) -> None: + dataset = "doesnotexist" + assert worker.process(dataset=dataset) is False + with pytest.raises(DoesNotExist): + get_response(kind=worker.processing_step.cache_kind, dataset=dataset) + + [email protected]( + "dataset,blocked,raises", + [ + ("public", ["public"], True), + ("public", ["public", "audio"], True), + ("public", ["audio"], False), + ("public", [], False), + ], +) +def test_raise_if_blocked(dataset: str, blocked: List[str], raises: bool) -> None: + if raises: + with pytest.raises(DatasetInBlockListError): + raise_if_blocked(dataset=dataset, blocked_datasets=blocked) + else: + raise_if_blocked(dataset=dataset, blocked_datasets=blocked) + + [email protected]( + "name,raises", + [("public", False), ("big", True)], +) +def test_raise_if_too_big_from_hub( + hub_datasets: HubDatasets, name: str, raises: bool, app_config: AppConfig, parquet_config: ParquetConfig +) -> None: + dataset = hub_datasets[name]["name"] + dataset_info = get_dataset_info_or_raise( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + revision="main", + ) + if raises: + with pytest.raises(DatasetTooBigFromHubError): + raise_if_too_big_from_hub(dataset_info=dataset_info, max_dataset_size=parquet_config.max_dataset_size) + else: + raise_if_too_big_from_hub(dataset_info=dataset_info, max_dataset_size=parquet_config.max_dataset_size) + + [email protected]( + "name,raises", + [("public", False), ("big", True)], +) +def test_raise_if_too_big_from_datasets( 
+ hub_datasets: HubDatasets, name: str, raises: bool, app_config: AppConfig, parquet_config: ParquetConfig +) -> None: + dataset = hub_datasets[name]["name"] + if raises: + with pytest.raises(DatasetTooBigFromDatasetsError): + raise_if_too_big_from_datasets( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + revision="main", + max_dataset_size=parquet_config.max_dataset_size, + ) + else: + raise_if_too_big_from_datasets( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + revision="main", + max_dataset_size=parquet_config.max_dataset_size, + ) + + [email protected]( + "in_list,raises", + [ + (True, False), + (False, True), + ], +) +def test_raise_if_not_supported( + hub_public_big: str, app_config: AppConfig, parquet_config: ParquetConfig, in_list: bool, raises: bool +) -> None: + if raises: + with pytest.raises(DatasetTooBigFromDatasetsError): + raise_if_not_supported( + dataset=hub_public_big, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + committer_hf_token=parquet_config.committer_hf_token, + revision="main", + max_dataset_size=parquet_config.max_dataset_size, + supported_datasets=[hub_public_big] if in_list else ["another_dataset"], + blocked_datasets=[], + ) + else: + raise_if_not_supported( + dataset=hub_public_big, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + committer_hf_token=parquet_config.committer_hf_token, + revision="main", + max_dataset_size=parquet_config.max_dataset_size, + supported_datasets=[hub_public_big] if in_list else ["another_dataset"], + blocked_datasets=[], + ) + + +def test_not_supported_if_big(worker: ParquetWorker, hub_public_big: str) -> None: + # Not in the list of supported datasets and bigger than the maximum size + assert worker.process(dataset=hub_public_big) is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_public_big) + assert cached_response["http_status"] == HTTPStatus.NOT_IMPLEMENTED + assert cached_response["error_code"] == "DatasetTooBigFromDatasetsError" + + +def test_supported_if_gated(worker: ParquetWorker, hub_gated_csv: str) -> None: + # Access must be granted + assert worker.process(dataset=hub_gated_csv) is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_gated_csv) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + + [email protected] +def test_not_supported_if_gated_with_extra_fields(worker: ParquetWorker, hub_gated_extra_fields_csv: str) -> None: + # Access request should fail because extra fields in gated datasets are not supported + assert worker.process(dataset=hub_gated_extra_fields_csv) is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_gated_extra_fields_csv) + assert cached_response["http_status"] == HTTPStatus.NOT_FOUND + assert cached_response["error_code"] == "GatedExtraFieldsError" + + [email protected] +def test_blocked(worker: ParquetWorker, hub_public_jsonl: str) -> None: + # In the list of blocked datasets + assert worker.process(dataset=hub_public_jsonl) is False + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_public_jsonl) + assert cached_response["http_status"] == HTTPStatus.NOT_IMPLEMENTED + assert cached_response["error_code"] == "DatasetInBlockListError" + + [email protected] +def test_process_job(worker: 
ParquetWorker, hub_public_csv: str) -> None: + worker.queue.add_job(dataset=hub_public_csv) + result = worker.process_next_job() + assert result is True + + [email protected] [email protected]( + "name", + ["public", "audio", "gated"], +) +def test_compute_splits_response_simple_csv_ok( + hub_datasets: HubDatasets, name: str, app_config: AppConfig, parquet_config: ParquetConfig, data_df: pd.DataFrame +) -> None: + dataset = hub_datasets[name]["name"] + expected_parquet_response = hub_datasets[name]["parquet_response"] + result = compute_parquet_response( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + committer_hf_token=parquet_config.committer_hf_token, + source_revision=parquet_config.source_revision, + target_revision=parquet_config.target_revision, + commit_message=parquet_config.commit_message, + url_template=parquet_config.url_template, + supported_datasets=parquet_config.supported_datasets, + blocked_datasets=parquet_config.blocked_datasets, + max_dataset_size=parquet_config.max_dataset_size, + ) + assert result == expected_parquet_response + + # download the parquet file and check that it is valid + if name == "audio": + return + + if name == "public": + df = pd.read_parquet(result["parquet_files"][0]["url"], engine="auto") + else: + # in all these cases, the parquet files are not accessible without a token + with pytest.raises(Exception): + pd.read_parquet(result["parquet_files"][0]["url"], engine="auto") + r = requests.get( + result["parquet_files"][0]["url"], headers={"Authorization": f"Bearer {app_config.common.hf_token}"} + ) + assert r.status_code == HTTPStatus.OK, r.text + df = pd.read_parquet(io.BytesIO(r.content), engine="auto") + assert df.equals(data_df), df + + [email protected] [email protected]( + "name,error_code,cause", + [ + ("empty", "EmptyDatasetError", "EmptyDatasetError"), + ("does_not_exist", "DatasetNotFoundError", None), + ("gated_extra_fields", "GatedExtraFieldsError", None), + ("private", "DatasetNotFoundError", None), + ], +) +def test_compute_splits_response_simple_csv_error( + hub_datasets: HubDatasets, + name: str, + error_code: str, + cause: str, + app_config: AppConfig, + parquet_config: ParquetConfig, +) -> None: + dataset = hub_datasets[name]["name"] + with pytest.raises(CustomError) as exc_info: + compute_parquet_response( + dataset=dataset, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + committer_hf_token=parquet_config.committer_hf_token, + source_revision=parquet_config.source_revision, + target_revision=parquet_config.target_revision, + commit_message=parquet_config.commit_message, + url_template=parquet_config.url_template, + supported_datasets=parquet_config.supported_datasets, + blocked_datasets=parquet_config.blocked_datasets, + max_dataset_size=parquet_config.max_dataset_size, + ) + assert exc_info.value.code == error_code + if cause is None: + assert exc_info.value.disclose_cause is False + assert exc_info.value.cause_exception is None + else: + assert exc_info.value.disclose_cause is True + assert exc_info.value.cause_exception == cause + response = exc_info.value.as_response() + assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} + response_dict = dict(response) + # ^ to remove mypy warnings + assert response_dict["cause_exception"] == cause + assert isinstance(response_dict["cause_traceback"], list) + assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" + + [email protected](
+ "filename,split,config,raises", + [ + ("config/builder-split.parquet", "split", "config", False), + ("config/builder-split-00000-of-00001.parquet", "split", "config", False), + ("builder-split-00000-of-00001.parquet", "split", "config", True), + ("config/builder-not-supported.parquet", "not-supported", "config", True), + ], +) +def test_parse_repo_filename(filename: str, split: str, config: str, raises: bool) -> None: + if raises: + with pytest.raises(Exception): + parse_repo_filename(filename) + else: + assert parse_repo_filename(filename) == (config, split) diff --git a/workers/datasets_based/tests/workers/test_splits.py index cf335c72..8ec8252a 100644 --- a/workers/datasets_based/tests/workers/test_splits.py +++ b/workers/datasets_based/tests/workers/test_splits.py @@ -8,2 +8 @@ from libcommon.exceptions import CustomError -from libcommon.queue import _clean_queue_database -from libcommon.simple_cache import DoesNotExist, _clean_cache_database, get_response +from libcommon.simple_cache import DoesNotExist, get_response @@ -17,7 +16 @@ from ..fixtures.hub import HubDatasets [email protected](autouse=True) -def clean_mongo_database() -> None: - _clean_cache_database() - _clean_queue_database() - - [email protected](autouse=True, scope="module") [email protected] @@ -25,7 +18 @@ def worker(app_config: AppConfig) -> SplitsWorker: - return SplitsWorker(app_config=app_config, endpoint="/splits") - - -def test_version(worker: SplitsWorker) -> None: - assert len(worker.version.split(".")) == 3 - assert worker.compare_major_version(other_version="0.0.0") > 0 - assert worker.compare_major_version(other_version="1000.0.0") < 0 + return SplitsWorker(app_config=app_config)
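
Note on the cache-subdirectory names pinned in test_get_cache_subdirectory above: each name is a timestamp prefix plus the endpoint and dataset (with "/" flattened to "-"), truncated to a fixed width, followed by a short hash of the full job payload, so every parameter variation gets its own directory and the name always ends with the hash. Below is a minimal sketch of such a naming scheme, assuming a JSON payload, SHA-1, an 8-character digest, and a 64-character prefix cap; the real worker's constants may differ, so this will not reproduce the exact hashes asserted in the test.

import hashlib
import json
from datetime import datetime
from typing import Optional


def get_cache_subdirectory(
    date: datetime,
    endpoint: str,
    dataset: str,
    config: Optional[str] = None,
    split: Optional[str] = None,
    force: bool = False,
    max_length: int = 64,  # assumed truncation width
) -> str:
    # Hash the whole payload so that changing any parameter changes the name.
    payload = json.dumps(
        {"endpoint": endpoint, "dataset": dataset, "config": config, "split": split, "force": force},
        sort_keys=True,
    )
    digest = hashlib.sha1(payload.encode("utf-8")).hexdigest()[:8]  # assumed hash and digest length
    prefix = f"{date.strftime('%Y-%m-%d-%H-%M-%S')}-{endpoint.replace('/', '-')}-{dataset.replace('/', '-')}"
    # Truncate only the human-readable part, so the name always ends with the hash.
    return f"{prefix[:max_length]}-{digest}"


# e.g. get_cache_subdirectory(datetime(2022, 11, 7, 12, 34, 56), "/splits", "user/dataset", "config", "split", True)
# -> "2022-11-07-12-34-56--splits-user-dataset-<8-char hash>"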
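
Similarly, the parse_repo_filename cases expect parquet files laid out as "{config}/{builder}-{split}.parquet", optionally sharded as "{builder}-{split}-NNNNN-of-NNNNN.parquet", with anything else rejected. Here is a rough sketch of a parser consistent with those four cases, assuming builder and split names contain no dashes; the real helper's validation rules and error type may differ.

import re
from typing import Tuple

# One config directory, then builder-split with an optional 5-digit shard suffix.
FILENAME_RE = re.compile(r"^(?P<builder>\w+)-(?P<split>\w+)(?:-\d{5}-of-\d{5})?\.parquet$")


def parse_repo_filename(filename: str) -> Tuple[str, str]:
    parts = filename.split("/")
    if len(parts) != 2:
        raise ValueError(f"invalid filename, expected a config directory: {filename}")
    config, basename = parts
    match = FILENAME_RE.match(basename)
    if match is None:
        raise ValueError(f"invalid filename, expected builder-split[-shard].parquet: {filename}")
    return config, match.group("split")


# "config/builder-split-00000-of-00001.parquet" -> ("config", "split");
# "config/builder-not-supported.parquet" raises, since "not-supported" cannot match a dash-free split.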
c7cfaa8bc2192933b476e4e92d973b6eba306db7
Sylvain Lesage
2022-12-05T10:23:56
feat: 🎸 upgrade from python 3.9.6 to 3.9.15 (#658)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index 5e52189f..acfce650 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -12 +12 @@ env: - python-version: 3.9.6 + python-version: 3.9.15 diff --git a/.github/workflows/_quality-python.yml b/.github/workflows/_quality-python.yml index c1e8be4c..1deb065c 100644 --- a/.github/workflows/_quality-python.yml +++ b/.github/workflows/_quality-python.yml @@ -17 +17 @@ env: - python-version: "3.9.6" + python-version: "3.9.15" @@ -57 +57 @@ jobs: - run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" + run: bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" @@ -60 +60 @@ jobs: - run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d')" + run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" diff --git a/.github/workflows/_unit-tests-python.yml b/.github/workflows/_unit-tests-python.yml index edc98192..d7155b0f 100644 --- a/.github/workflows/_unit-tests-python.yml +++ b/.github/workflows/_unit-tests-python.yml @@ -18 +18 @@ env: - python-version: "3.9.6" + python-version: "3.9.15" diff --git a/.github/workflows/openapi-spec.yml b/.github/workflows/openapi-spec.yml index 987e870d..543fcf98 100644 --- a/.github/workflows/openapi-spec.yml +++ b/.github/workflows/openapi-spec.yml @@ -12 +12 @@ env: - python-version: 3.9.6 + python-version: 3.9.15 diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 90d250ab..b8ba9f36 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -26,2 +26,2 @@ cd libs/libcommon/ -pyenv install 3.9.6 -pyenv local 3.9.6 +pyenv install 3.9.15 +pyenv local 3.9.15 @@ -214 +214 @@ Logout and login again. 
-Install Python 3.9.6: +Install Python 3.9.15: @@ -217 +217 @@ Install Python 3.9.6: -$ pyenv install 3.9.6 +$ pyenv install 3.9.15 @@ -225 +225 @@ $ python --version -Python 3.9.6 +Python 3.9.15 @@ -250 +250 @@ Set the python version to use with poetry: -poetry env use 3.9.6 +poetry env use 3.9.15 diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index e544e4f7..2a34e6f8 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-c815296" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-cef5577" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-298bbc9", - "api": "huggingface/datasets-server-services-api:sha-b9b950d" + "admin": "huggingface/datasets-server-services-admin:sha-cef5577", + "api": "huggingface/datasets-server-services-api:sha-cef5577" @@ -12 +12 @@ - "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-d6a0b1e" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-cef5577" diff --git a/docs/.python-version b/docs/.python-version index 1635d0f5..b326afbc 100644 --- a/docs/.python-version +++ b/docs/.python-version @@ -1 +1 @@ -3.9.6 +3.9.15 diff --git a/docs/poetry.lock b/docs/poetry.lock index ea3d2e7b..a5d2185b 100644 --- a/docs/poetry.lock +++ b/docs/poetry.lock @@ -322 +322 @@ lock-version = "1.1" -python-versions = "3.9.6" +python-versions = "3.9.15" diff --git a/docs/pyproject.toml b/docs/pyproject.toml index 2038e729..4b021f69 100644 --- a/docs/pyproject.toml +++ b/docs/pyproject.toml @@ -9 +9 @@ hf-doc-builder = { extras = ["quality"], version = "0.3.0" } -python = "3.9.6" +python = "3.9.15" diff --git a/e2e/.python-version b/e2e/.python-version index 1635d0f5..b326afbc 100644 --- a/e2e/.python-version +++ b/e2e/.python-version @@ -1 +1 @@ -3.9.6 +3.9.15 diff --git a/e2e/poetry.lock b/e2e/poetry.lock index 50225dea..0e8dab51 100644 --- a/e2e/poetry.lock +++ b/e2e/poetry.lock @@ -124 +124 @@ name = "cyclonedx-python-lib" -version = "3.1.0" +version = "3.1.1" @@ -149 +149 @@ name = "filelock" -version = "3.8.0" +version = "3.8.1" @@ -156,2 +156,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -174 +174 @@ name = "gitdb" -version = "4.0.9" +version = "4.0.10" @@ -178 +178 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -214 +214 @@ name = "huggingface-hub" -version = "0.11.0" +version = "0.11.1" @@ -271 +271 @@ name = "jsonschema" -version = "4.17.1" +version = "4.17.3" @@ -426 +426 @@ name = "pip-audit" -version = "2.4.6" +version = "2.4.7" @@ -621 +621 @@ name = "setuptools" -version = "65.6.2" +version = "65.6.3" @@ -747,2 +747,2 @@ lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "7c71bf005cae43f81a3d6a2cec158515646b069d84a8ae0054ed5b61279ce7a3" +python-versions = "3.9.15" +content-hash = "1221c5dd97d09acd12a4a388d83084e57194bced3c29c0a3e24215597bf75f4e" @@ -807,2 +807,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = 
"sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, + {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, + {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, @@ -815,2 +815,2 @@ filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, + {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, @@ -823,2 +823,2 @@ gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, @@ -835,2 +835,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, - {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, + {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, + {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, @@ -851,2 +851,2 @@ jsonschema = [ - {file = "jsonschema-4.17.1-py3-none-any.whl", hash = "sha256:410ef23dcdbca4eaedc08b850079179883c2ed09378bd1f760d4af4aacfa28d7"}, - {file = "jsonschema-4.17.1.tar.gz", hash = "sha256:05b2d22c83640cde0b7e0aa329ca7754fbd98ea66ad8ae24aa61328dfe057fa3"}, + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, @@ -977,2 +977,2 @@ pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, + {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, + {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, @@ -1095,2 +1095,2 @@ setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = 
"sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, diff --git a/e2e/pyproject.toml b/e2e/pyproject.toml index 03afc152..98744dfc 100644 --- a/e2e/pyproject.toml +++ b/e2e/pyproject.toml @@ -10 +10 @@ openapi-spec-validator = "^0.4.0" -python = "3.9.6" +python = "3.9.15" diff --git a/jobs/mongodb_migration/.python-version b/jobs/mongodb_migration/.python-version index 1635d0f5..b326afbc 100644 --- a/jobs/mongodb_migration/.python-version +++ b/jobs/mongodb_migration/.python-version @@ -1 +1 @@ -3.9.6 +3.9.15 diff --git a/jobs/mongodb_migration/Dockerfile b/jobs/mongodb_migration/Dockerfile index 85f9d9fc..301a9861 100644 --- a/jobs/mongodb_migration/Dockerfile +++ b/jobs/mongodb_migration/Dockerfile @@ -3 +3 @@ -FROM python:3.9.6-slim +FROM python:3.9.15-slim diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index e614a601..d8a9884d 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -143 +143 @@ name = "cyclonedx-python-lib" -version = "3.1.0" +version = "3.1.1" @@ -198 +198 @@ name = "filelock" -version = "3.8.0" +version = "3.8.1" @@ -205,2 +205,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -263 +263 @@ name = "huggingface-hub" -version = "0.11.0" +version = "0.11.1" @@ -320 +320 @@ name = "libcommon" -version = "0.5.0" +version = "0.5.3" @@ -324 +324 @@ optional = false -python-versions = "==3.9.6" +python-versions = "==3.9.15" @@ -338 +338 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" @@ -426 +426 @@ name = "orjson" -version = "3.8.2" +version = "3.8.3" @@ -492 +492 @@ name = "pip-audit" -version = "2.4.6" +version = "2.4.7" @@ -864,2 +864,2 @@ lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "e5eb92e8b71d1e68a29761ed4d1e58c3c03c5fe600f835584a19369a64c6d5c0" +python-versions = "3.9.15" +content-hash = "f50de0b55e954056cf522ef2b71dfa3677488f11668cd1479a478d7e544a9263" @@ -980,2 +980,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, + {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, + {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, @@ -996,2 +996,2 @@ filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, + {file 
= "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, @@ -1016,2 +1016,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, - {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, + {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, + {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, @@ -1032 +1032 @@ libcommon = [ - {file = "libcommon-0.5.0-py3-none-any.whl", hash = "sha256:0267504716992f562382ff5029ace87444fd12793f2393f3800921d384a0fd52"}, + {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, @@ -1137,49 +1137,44 @@ orjson = [ - {file = "orjson-3.8.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:43e69b360c2851b45c7dbab3b95f7fa8469df73fab325a683f7389c4db63aa71"}, - {file = "orjson-3.8.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:64c5da5c9679ef3d85e9bbcbb62f4ccdc1f1975780caa20f2ec1e37b4da6bd36"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c632a2157fa9ec098d655287e9e44809615af99837c49f53d96bfbca453c5bd"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f63da6309c282a2b58d4a846f0717f6440356b4872838b9871dc843ed1fe2b38"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9be25c313ba2d5478829d949165445c3bd36c62e07092b4ba8dbe5426574d1"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4bcce53e9e088f82633f784f79551fcd7637943ab56c51654aaf9d4c1d5cfa54"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:33edb5379c6e6337f9383c85fe4080ce3aa1057cc2ce29345b7239461f50cbd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:da35d347115758bbc8bfaf39bb213c42000f2a54e3f504c84374041d20835cd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d755d94a90a941b91b4d39a6b02e289d8ba358af2d1a911edf266be7942609dc"}, - {file = "orjson-3.8.2-cp310-none-win_amd64.whl", hash = "sha256:7ea96923e26390b2142602ebb030e2a4db9351134696e0b219e5106bddf9b48e"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:a0d89de876e6f1cef917a2338378a60a98584e1c2e1c67781e20b6ed1c512478"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8d47e7592fe938aec898eb22ea4946298c018133df084bc78442ff18e2c6347c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d9f1043f618d0c64228aab9711e5bd822253c50b6c56223951e32b51f81d62"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed10600e8b08f1e87b656ad38ab316191ce94f2c9adec57035680c0dc9e93c81"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99c49e49a04bf61fee7aaea6d92ac2b1fcf6507aea894bbdf3fbb25fe792168c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1463674f8efe6984902473d7b5ce3edf444c1fcd09dc8aa4779638a28fb9ca01"}, - {file = 
"orjson-3.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c1ef75f1d021d817e5c60a42da0b4b7e3123b1b37415260b8415666ddacc7cd7"}, - {file = "orjson-3.8.2-cp311-none-win_amd64.whl", hash = "sha256:b6007e1ac8564b13b2521720929e8bb3ccd3293d9fdf38f28728dcc06db6248f"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a02c13ae523221576b001071354380e277346722cc6b7fdaacb0fd6db5154b3e"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fa2e565cf8ffdb37ce1887bd1592709ada7f701e61aa4b1e710be94b0aecbab4"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d8864288f7c5fccc07b43394f83b721ddc999f25dccfb5d0651671a76023f5"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1874c05d0bb994601fa2d51605cb910d09343c6ebd36e84a573293523fab772a"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349387ed6989e5db22e08c9af8d7ca14240803edc50de451d48d41a0e7be30f6"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4e42b19619d6e97e201053b865ca4e62a48da71165f4081508ada8e1b91c6a30"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:bc112c17e607c59d1501e72afb44226fa53d947d364aed053f0c82d153e29616"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6fda669211f2ed1fc2c8130187ec90c96b4f77b6a250004e666d2ef8ed524e5f"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aebd4e80fea0f20578fd0452908b9206a6a0d5ae9f5c99b6e665bbcd989e56cd"}, - {file = "orjson-3.8.2-cp37-none-win_amd64.whl", hash = "sha256:9f3cd0394eb6d265beb2a1572b5663bc910883ddbb5cdfbcb660f5a0444e7fd8"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:74e7d54d11b3da42558d69a23bf92c2c48fabf69b38432d5eee2c5b09cd4c433"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8cbadc9be748a823f9c743c7631b1ee95d3925a9c0b21de4e862a1d57daa10ec"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07d5a8c69a2947d9554a00302734fe3d8516415c8b280963c92bc1033477890"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b364ea01d1b71b9f97bf97af9eb79ebee892df302e127a9e2e4f8eaa74d6b98"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b98a8c825a59db94fbe8e0cce48618624c5a6fb1436467322d90667c08a0bf80"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ab63103f60b516c0fce9b62cb4773f689a82ab56e19ef2387b5a3182f80c0d78"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:73ab3f4288389381ae33ab99f914423b69570c88d626d686764634d5e0eeb909"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ab3fd8728e12c36e20c6d9d70c9e15033374682ce5acb6ed6a08a80dacd254d"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cde11822cf71a7f0daaa84223249b2696a2b6cda7fa587e9fd762dff1a8848e4"}, - {file = "orjson-3.8.2-cp38-none-win_amd64.whl", hash = "sha256:b14765ea5aabfeab1a194abfaa0be62c9fee6480a75ac8c6974b4eeede3340b4"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6068a27d59d989d4f2864c2fc3440eb7126a0cfdfaf8a4ad136b0ffd932026ae"}, - {file = 
"orjson-3.8.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bf36fa759a1b941fc552ad76b2d7fb10c1d2a20c056be291ea45eb6ae1da09b"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f436132e62e647880ca6988974c8e3165a091cb75cbed6c6fd93e931630c22fa"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ecd8936259a5920b52a99faf62d4efeb9f5e25a0aacf0cce1e9fa7c37af154f"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13114b345cda33644f64e92fe5d8737828766cf02fbbc7d28271a95ea546832"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6e43cdc3ddf96bdb751b748b1984b701125abacca8fc2226b808d203916e8cba"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ee39071da2026b11e4352d6fc3608a7b27ee14bc699fd240f4e604770bc7a255"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1c3833976ebbeb3b5b6298cb22e23bf18453f6b80802103b7d08f7dd8a61611d"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b9a34519d3d70935e1cd3797fbed8fbb6f61025182bea0140ca84d95b6f8fbe5"}, - {file = "orjson-3.8.2-cp39-none-win_amd64.whl", hash = "sha256:2734086d9a3dd9591c4be7d05aff9beccc086796d3f243685e56b7973ebac5bc"}, - {file = "orjson-3.8.2.tar.gz", hash = "sha256:a2fb95a45031ccf278e44341027b3035ab99caa32aa173279b1f0a06324f434b"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, + {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, + {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, + {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, + {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, + {file = 
"orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, + {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, + {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, @@ -1212,2 +1207,2 @@ pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, + {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, + {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index 9680d852..d095265b 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10,2 +10,2 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl", develop = false } -python = "3.9.6" +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } +python = "3.9.15" diff --git a/libs/libcommon/.python-version b/libs/libcommon/.python-version index 1635d0f5..b326afbc 100644 --- a/libs/libcommon/.python-version +++ b/libs/libcommon/.python-version @@ -1 +1 @@ -3.9.6 +3.9.15 diff --git a/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl new file mode 100644 index 00000000..3562f67e Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.3.tar.gz b/libs/libcommon/dist/libcommon-0.5.3.tar.gz new file mode 100644 index 00000000..8ba33bb3 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.3.tar.gz differ diff --git a/libs/libcommon/poetry.lock b/libs/libcommon/poetry.lock index 7452e963..1ec23a1e 100644 --- a/libs/libcommon/poetry.lock +++ b/libs/libcommon/poetry.lock @@ -143 +143 @@ name = "cyclonedx-python-lib" -version = "3.1.0" +version = "3.1.1" @@ -198 +198 @@ name = "filelock" -version = "3.8.0" +version = "3.8.1" @@ -205,2 +205,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults 
(>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -263 +263 @@ name = "huggingface-hub" -version = "0.11.0" +version = "0.11.1" @@ -404 +404 @@ name = "orjson" -version = "3.8.2" +version = "3.8.3" @@ -470 +470 @@ name = "pip-audit" -version = "2.4.6" +version = "2.4.7" @@ -850,2 +850,2 @@ lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "c41aa118d317c52903d9074626aa783b8a876edc79a4c0a3f619471f175e8c39" +python-versions = "3.9.15" +content-hash = "f680ec6ec5d2a2e09adaaaf2f0f857e0bd89f22cd3d4b1de8c97f39402c872a3" @@ -966,2 +966,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, + {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, + {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, @@ -982,2 +982,2 @@ filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, + {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, @@ -1002,2 +1002,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, - {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, + {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, + {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, @@ -1120,49 +1120,44 @@ orjson = [ - {file = "orjson-3.8.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:43e69b360c2851b45c7dbab3b95f7fa8469df73fab325a683f7389c4db63aa71"}, - {file = "orjson-3.8.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:64c5da5c9679ef3d85e9bbcbb62f4ccdc1f1975780caa20f2ec1e37b4da6bd36"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c632a2157fa9ec098d655287e9e44809615af99837c49f53d96bfbca453c5bd"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f63da6309c282a2b58d4a846f0717f6440356b4872838b9871dc843ed1fe2b38"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9be25c313ba2d5478829d949165445c3bd36c62e07092b4ba8dbe5426574d1"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4bcce53e9e088f82633f784f79551fcd7637943ab56c51654aaf9d4c1d5cfa54"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", 
hash = "sha256:33edb5379c6e6337f9383c85fe4080ce3aa1057cc2ce29345b7239461f50cbd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:da35d347115758bbc8bfaf39bb213c42000f2a54e3f504c84374041d20835cd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d755d94a90a941b91b4d39a6b02e289d8ba358af2d1a911edf266be7942609dc"}, - {file = "orjson-3.8.2-cp310-none-win_amd64.whl", hash = "sha256:7ea96923e26390b2142602ebb030e2a4db9351134696e0b219e5106bddf9b48e"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:a0d89de876e6f1cef917a2338378a60a98584e1c2e1c67781e20b6ed1c512478"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8d47e7592fe938aec898eb22ea4946298c018133df084bc78442ff18e2c6347c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d9f1043f618d0c64228aab9711e5bd822253c50b6c56223951e32b51f81d62"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed10600e8b08f1e87b656ad38ab316191ce94f2c9adec57035680c0dc9e93c81"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99c49e49a04bf61fee7aaea6d92ac2b1fcf6507aea894bbdf3fbb25fe792168c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1463674f8efe6984902473d7b5ce3edf444c1fcd09dc8aa4779638a28fb9ca01"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c1ef75f1d021d817e5c60a42da0b4b7e3123b1b37415260b8415666ddacc7cd7"}, - {file = "orjson-3.8.2-cp311-none-win_amd64.whl", hash = "sha256:b6007e1ac8564b13b2521720929e8bb3ccd3293d9fdf38f28728dcc06db6248f"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a02c13ae523221576b001071354380e277346722cc6b7fdaacb0fd6db5154b3e"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fa2e565cf8ffdb37ce1887bd1592709ada7f701e61aa4b1e710be94b0aecbab4"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d8864288f7c5fccc07b43394f83b721ddc999f25dccfb5d0651671a76023f5"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1874c05d0bb994601fa2d51605cb910d09343c6ebd36e84a573293523fab772a"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349387ed6989e5db22e08c9af8d7ca14240803edc50de451d48d41a0e7be30f6"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4e42b19619d6e97e201053b865ca4e62a48da71165f4081508ada8e1b91c6a30"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:bc112c17e607c59d1501e72afb44226fa53d947d364aed053f0c82d153e29616"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6fda669211f2ed1fc2c8130187ec90c96b4f77b6a250004e666d2ef8ed524e5f"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aebd4e80fea0f20578fd0452908b9206a6a0d5ae9f5c99b6e665bbcd989e56cd"}, - {file = "orjson-3.8.2-cp37-none-win_amd64.whl", hash = "sha256:9f3cd0394eb6d265beb2a1572b5663bc910883ddbb5cdfbcb660f5a0444e7fd8"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:74e7d54d11b3da42558d69a23bf92c2c48fabf69b38432d5eee2c5b09cd4c433"}, - {file = 
"orjson-3.8.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8cbadc9be748a823f9c743c7631b1ee95d3925a9c0b21de4e862a1d57daa10ec"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07d5a8c69a2947d9554a00302734fe3d8516415c8b280963c92bc1033477890"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b364ea01d1b71b9f97bf97af9eb79ebee892df302e127a9e2e4f8eaa74d6b98"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b98a8c825a59db94fbe8e0cce48618624c5a6fb1436467322d90667c08a0bf80"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ab63103f60b516c0fce9b62cb4773f689a82ab56e19ef2387b5a3182f80c0d78"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:73ab3f4288389381ae33ab99f914423b69570c88d626d686764634d5e0eeb909"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ab3fd8728e12c36e20c6d9d70c9e15033374682ce5acb6ed6a08a80dacd254d"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cde11822cf71a7f0daaa84223249b2696a2b6cda7fa587e9fd762dff1a8848e4"}, - {file = "orjson-3.8.2-cp38-none-win_amd64.whl", hash = "sha256:b14765ea5aabfeab1a194abfaa0be62c9fee6480a75ac8c6974b4eeede3340b4"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6068a27d59d989d4f2864c2fc3440eb7126a0cfdfaf8a4ad136b0ffd932026ae"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bf36fa759a1b941fc552ad76b2d7fb10c1d2a20c056be291ea45eb6ae1da09b"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f436132e62e647880ca6988974c8e3165a091cb75cbed6c6fd93e931630c22fa"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ecd8936259a5920b52a99faf62d4efeb9f5e25a0aacf0cce1e9fa7c37af154f"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13114b345cda33644f64e92fe5d8737828766cf02fbbc7d28271a95ea546832"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6e43cdc3ddf96bdb751b748b1984b701125abacca8fc2226b808d203916e8cba"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ee39071da2026b11e4352d6fc3608a7b27ee14bc699fd240f4e604770bc7a255"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1c3833976ebbeb3b5b6298cb22e23bf18453f6b80802103b7d08f7dd8a61611d"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b9a34519d3d70935e1cd3797fbed8fbb6f61025182bea0140ca84d95b6f8fbe5"}, - {file = "orjson-3.8.2-cp39-none-win_amd64.whl", hash = "sha256:2734086d9a3dd9591c4be7d05aff9beccc086796d3f243685e56b7973ebac5bc"}, - {file = "orjson-3.8.2.tar.gz", hash = "sha256:a2fb95a45031ccf278e44341027b3035ab99caa32aa173279b1f0a06324f434b"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, + {file = 
"orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, + {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, + {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, + {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = 
"sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, + {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, + {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, + {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, @@ -1195,2 +1190,2 @@ pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, + {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, + {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 286b3001..09d28453 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 
@@ name = "libcommon" -version = "0.5.2" +version = "0.5.3" @@ -17 +17 @@ pymongo = { extras = ["srv"], version = "^3.13.0" } -python = "3.9.6" +python = "3.9.15" diff --git a/services/admin/.python-version b/services/admin/.python-version index 1635d0f5..b326afbc 100644 --- a/services/admin/.python-version +++ b/services/admin/.python-version @@ -1 +1 @@ -3.9.6 +3.9.15 diff --git a/services/admin/Dockerfile b/services/admin/Dockerfile index f7a3e536..a7b70180 100644 --- a/services/admin/Dockerfile +++ b/services/admin/Dockerfile @@ -3 +3 @@ -FROM python:3.9.6-slim +FROM python:3.9.15-slim diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 5af58183..6b3fdf62 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -171 +171 @@ name = "cyclonedx-python-lib" -version = "3.1.0" +version = "3.1.1" @@ -226 +226 @@ name = "filelock" -version = "3.8.0" +version = "3.8.1" @@ -233,2 +233,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -356 +356 @@ name = "libcommon" -version = "0.5.2" +version = "0.5.3" @@ -360 +360 @@ optional = false -python-versions = "==3.9.6" +python-versions = "==3.9.15" @@ -374 +374 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" @@ -462 +462 @@ name = "orjson" -version = "3.8.2" +version = "3.8.3" @@ -990,2 +990,2 @@ lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "7da2ea2d7009addc9d940eda71eee51a1990f170dd82a19ed3a0ae8a2e348550" +python-versions = "3.9.15" +content-hash = "a2bd3c8191be9b63471f3e41338400451db039c50d15c1afeb8f262fd0071457" @@ -1114,2 +1114,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, + {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, + {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, @@ -1130,2 +1130,2 @@ filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, + {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, @@ -1170 +1170 @@ libcommon = [ - {file = "libcommon-0.5.2-py3-none-any.whl", hash = "sha256:6df419dbbe249cb9572ead24a2534a43aa128ff1f43a9da9cfe480c751c6ba21"}, + {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, @@ -1275,49 +1275,44 @@ orjson = [ - {file = "orjson-3.8.2-cp310-cp310-macosx_10_7_x86_64.whl", 
hash = "sha256:43e69b360c2851b45c7dbab3b95f7fa8469df73fab325a683f7389c4db63aa71"}, - {file = "orjson-3.8.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:64c5da5c9679ef3d85e9bbcbb62f4ccdc1f1975780caa20f2ec1e37b4da6bd36"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c632a2157fa9ec098d655287e9e44809615af99837c49f53d96bfbca453c5bd"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f63da6309c282a2b58d4a846f0717f6440356b4872838b9871dc843ed1fe2b38"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9be25c313ba2d5478829d949165445c3bd36c62e07092b4ba8dbe5426574d1"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4bcce53e9e088f82633f784f79551fcd7637943ab56c51654aaf9d4c1d5cfa54"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:33edb5379c6e6337f9383c85fe4080ce3aa1057cc2ce29345b7239461f50cbd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:da35d347115758bbc8bfaf39bb213c42000f2a54e3f504c84374041d20835cd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d755d94a90a941b91b4d39a6b02e289d8ba358af2d1a911edf266be7942609dc"}, - {file = "orjson-3.8.2-cp310-none-win_amd64.whl", hash = "sha256:7ea96923e26390b2142602ebb030e2a4db9351134696e0b219e5106bddf9b48e"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:a0d89de876e6f1cef917a2338378a60a98584e1c2e1c67781e20b6ed1c512478"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8d47e7592fe938aec898eb22ea4946298c018133df084bc78442ff18e2c6347c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d9f1043f618d0c64228aab9711e5bd822253c50b6c56223951e32b51f81d62"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed10600e8b08f1e87b656ad38ab316191ce94f2c9adec57035680c0dc9e93c81"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99c49e49a04bf61fee7aaea6d92ac2b1fcf6507aea894bbdf3fbb25fe792168c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1463674f8efe6984902473d7b5ce3edf444c1fcd09dc8aa4779638a28fb9ca01"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c1ef75f1d021d817e5c60a42da0b4b7e3123b1b37415260b8415666ddacc7cd7"}, - {file = "orjson-3.8.2-cp311-none-win_amd64.whl", hash = "sha256:b6007e1ac8564b13b2521720929e8bb3ccd3293d9fdf38f28728dcc06db6248f"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a02c13ae523221576b001071354380e277346722cc6b7fdaacb0fd6db5154b3e"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fa2e565cf8ffdb37ce1887bd1592709ada7f701e61aa4b1e710be94b0aecbab4"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d8864288f7c5fccc07b43394f83b721ddc999f25dccfb5d0651671a76023f5"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1874c05d0bb994601fa2d51605cb910d09343c6ebd36e84a573293523fab772a"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:349387ed6989e5db22e08c9af8d7ca14240803edc50de451d48d41a0e7be30f6"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4e42b19619d6e97e201053b865ca4e62a48da71165f4081508ada8e1b91c6a30"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:bc112c17e607c59d1501e72afb44226fa53d947d364aed053f0c82d153e29616"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6fda669211f2ed1fc2c8130187ec90c96b4f77b6a250004e666d2ef8ed524e5f"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aebd4e80fea0f20578fd0452908b9206a6a0d5ae9f5c99b6e665bbcd989e56cd"}, - {file = "orjson-3.8.2-cp37-none-win_amd64.whl", hash = "sha256:9f3cd0394eb6d265beb2a1572b5663bc910883ddbb5cdfbcb660f5a0444e7fd8"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:74e7d54d11b3da42558d69a23bf92c2c48fabf69b38432d5eee2c5b09cd4c433"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8cbadc9be748a823f9c743c7631b1ee95d3925a9c0b21de4e862a1d57daa10ec"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07d5a8c69a2947d9554a00302734fe3d8516415c8b280963c92bc1033477890"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b364ea01d1b71b9f97bf97af9eb79ebee892df302e127a9e2e4f8eaa74d6b98"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b98a8c825a59db94fbe8e0cce48618624c5a6fb1436467322d90667c08a0bf80"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ab63103f60b516c0fce9b62cb4773f689a82ab56e19ef2387b5a3182f80c0d78"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:73ab3f4288389381ae33ab99f914423b69570c88d626d686764634d5e0eeb909"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ab3fd8728e12c36e20c6d9d70c9e15033374682ce5acb6ed6a08a80dacd254d"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cde11822cf71a7f0daaa84223249b2696a2b6cda7fa587e9fd762dff1a8848e4"}, - {file = "orjson-3.8.2-cp38-none-win_amd64.whl", hash = "sha256:b14765ea5aabfeab1a194abfaa0be62c9fee6480a75ac8c6974b4eeede3340b4"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6068a27d59d989d4f2864c2fc3440eb7126a0cfdfaf8a4ad136b0ffd932026ae"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bf36fa759a1b941fc552ad76b2d7fb10c1d2a20c056be291ea45eb6ae1da09b"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f436132e62e647880ca6988974c8e3165a091cb75cbed6c6fd93e931630c22fa"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ecd8936259a5920b52a99faf62d4efeb9f5e25a0aacf0cce1e9fa7c37af154f"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13114b345cda33644f64e92fe5d8737828766cf02fbbc7d28271a95ea546832"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6e43cdc3ddf96bdb751b748b1984b701125abacca8fc2226b808d203916e8cba"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ee39071da2026b11e4352d6fc3608a7b27ee14bc699fd240f4e604770bc7a255"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:1c3833976ebbeb3b5b6298cb22e23bf18453f6b80802103b7d08f7dd8a61611d"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b9a34519d3d70935e1cd3797fbed8fbb6f61025182bea0140ca84d95b6f8fbe5"}, - {file = "orjson-3.8.2-cp39-none-win_amd64.whl", hash = "sha256:2734086d9a3dd9591c4be7d05aff9beccc086796d3f243685e56b7973ebac5bc"}, - {file = "orjson-3.8.2.tar.gz", hash = "sha256:a2fb95a45031ccf278e44341027b3035ab99caa32aa173279b1f0a06324f434b"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, + {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, + {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, + {file = 
"orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, + {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, + {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, + {file = 
"orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, + {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index c38dce8a..299c5988 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10,2 +10,2 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl", develop = false } -python = "3.9.6" +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } +python = "3.9.15" diff --git a/services/api/.python-version b/services/api/.python-version index 1635d0f5..b326afbc 100644 --- a/services/api/.python-version +++ b/services/api/.python-version @@ -1 +1 @@ -3.9.6 +3.9.15 diff --git a/services/api/Dockerfile b/services/api/Dockerfile index a514af67..c85b3d65 100644 --- a/services/api/Dockerfile +++ b/services/api/Dockerfile @@ -3 +3 @@ -FROM python:3.9.6-slim +FROM python:3.9.15-slim diff --git a/services/api/poetry.lock b/services/api/poetry.lock index c9cde161..2e74fd97 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -171 +171 @@ name = "cyclonedx-python-lib" -version = "3.1.0" +version = "3.1.1" @@ -226 +226 @@ name = "filelock" -version = "3.8.0" +version = "3.8.1" @@ -233,2 +233,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -299 +299 @@ name = "huggingface-hub" -version = "0.11.0" +version = "0.11.1" @@ -356 +356 @@ name = "jsonschema" -version = "4.17.1" +version = "4.17.3" @@ -372 +372 @@ name = "libcommon" -version = "0.5.2" +version = "0.5.3" @@ -376 +376 @@ optional = false -python-versions = "==3.9.6" +python-versions = "==3.9.15" @@ -390 +390 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" @@ -486 +486 @@ name = "orjson" -version = "3.8.2" +version = "3.8.3" @@ -552 +552 @@ name = "pip-audit" -version = "2.4.6" +version = "2.4.7" @@ -1032,2 +1032,2 @@ lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "a43bc3379c79e6a46a9e1c0e285dd5a1b856c40ed39fae242c72da24a5ce331e" +python-versions = "3.9.15" +content-hash = "20c63e31801f3652a9b58587ed63d240d790be6c40d313f118f21a4b4e34caca" @@ -1156,2 +1156,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, + {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, + {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, @@ -1172,2 +1172,2 @@ filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = 
"filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, + {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, @@ -1196,2 +1196,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, - {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, + {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, + {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, @@ -1212,2 +1212,2 @@ jsonschema = [ - {file = "jsonschema-4.17.1-py3-none-any.whl", hash = "sha256:410ef23dcdbca4eaedc08b850079179883c2ed09378bd1f760d4af4aacfa28d7"}, - {file = "jsonschema-4.17.1.tar.gz", hash = "sha256:05b2d22c83640cde0b7e0aa329ca7754fbd98ea66ad8ae24aa61328dfe057fa3"}, + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, @@ -1216 +1216 @@ libcommon = [ - {file = "libcommon-0.5.2-py3-none-any.whl", hash = "sha256:6df419dbbe249cb9572ead24a2534a43aa128ff1f43a9da9cfe480c751c6ba21"}, + {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, @@ -1363,49 +1363,44 @@ orjson = [ - {file = "orjson-3.8.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:43e69b360c2851b45c7dbab3b95f7fa8469df73fab325a683f7389c4db63aa71"}, - {file = "orjson-3.8.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:64c5da5c9679ef3d85e9bbcbb62f4ccdc1f1975780caa20f2ec1e37b4da6bd36"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c632a2157fa9ec098d655287e9e44809615af99837c49f53d96bfbca453c5bd"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f63da6309c282a2b58d4a846f0717f6440356b4872838b9871dc843ed1fe2b38"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9be25c313ba2d5478829d949165445c3bd36c62e07092b4ba8dbe5426574d1"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4bcce53e9e088f82633f784f79551fcd7637943ab56c51654aaf9d4c1d5cfa54"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:33edb5379c6e6337f9383c85fe4080ce3aa1057cc2ce29345b7239461f50cbd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:da35d347115758bbc8bfaf39bb213c42000f2a54e3f504c84374041d20835cd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d755d94a90a941b91b4d39a6b02e289d8ba358af2d1a911edf266be7942609dc"}, - {file = "orjson-3.8.2-cp310-none-win_amd64.whl", hash = "sha256:7ea96923e26390b2142602ebb030e2a4db9351134696e0b219e5106bddf9b48e"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:a0d89de876e6f1cef917a2338378a60a98584e1c2e1c67781e20b6ed1c512478"}, - {file = 
"orjson-3.8.2-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8d47e7592fe938aec898eb22ea4946298c018133df084bc78442ff18e2c6347c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d9f1043f618d0c64228aab9711e5bd822253c50b6c56223951e32b51f81d62"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed10600e8b08f1e87b656ad38ab316191ce94f2c9adec57035680c0dc9e93c81"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99c49e49a04bf61fee7aaea6d92ac2b1fcf6507aea894bbdf3fbb25fe792168c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1463674f8efe6984902473d7b5ce3edf444c1fcd09dc8aa4779638a28fb9ca01"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c1ef75f1d021d817e5c60a42da0b4b7e3123b1b37415260b8415666ddacc7cd7"}, - {file = "orjson-3.8.2-cp311-none-win_amd64.whl", hash = "sha256:b6007e1ac8564b13b2521720929e8bb3ccd3293d9fdf38f28728dcc06db6248f"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a02c13ae523221576b001071354380e277346722cc6b7fdaacb0fd6db5154b3e"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fa2e565cf8ffdb37ce1887bd1592709ada7f701e61aa4b1e710be94b0aecbab4"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d8864288f7c5fccc07b43394f83b721ddc999f25dccfb5d0651671a76023f5"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1874c05d0bb994601fa2d51605cb910d09343c6ebd36e84a573293523fab772a"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349387ed6989e5db22e08c9af8d7ca14240803edc50de451d48d41a0e7be30f6"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4e42b19619d6e97e201053b865ca4e62a48da71165f4081508ada8e1b91c6a30"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:bc112c17e607c59d1501e72afb44226fa53d947d364aed053f0c82d153e29616"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6fda669211f2ed1fc2c8130187ec90c96b4f77b6a250004e666d2ef8ed524e5f"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aebd4e80fea0f20578fd0452908b9206a6a0d5ae9f5c99b6e665bbcd989e56cd"}, - {file = "orjson-3.8.2-cp37-none-win_amd64.whl", hash = "sha256:9f3cd0394eb6d265beb2a1572b5663bc910883ddbb5cdfbcb660f5a0444e7fd8"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:74e7d54d11b3da42558d69a23bf92c2c48fabf69b38432d5eee2c5b09cd4c433"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8cbadc9be748a823f9c743c7631b1ee95d3925a9c0b21de4e862a1d57daa10ec"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07d5a8c69a2947d9554a00302734fe3d8516415c8b280963c92bc1033477890"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b364ea01d1b71b9f97bf97af9eb79ebee892df302e127a9e2e4f8eaa74d6b98"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b98a8c825a59db94fbe8e0cce48618624c5a6fb1436467322d90667c08a0bf80"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:ab63103f60b516c0fce9b62cb4773f689a82ab56e19ef2387b5a3182f80c0d78"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:73ab3f4288389381ae33ab99f914423b69570c88d626d686764634d5e0eeb909"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ab3fd8728e12c36e20c6d9d70c9e15033374682ce5acb6ed6a08a80dacd254d"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cde11822cf71a7f0daaa84223249b2696a2b6cda7fa587e9fd762dff1a8848e4"}, - {file = "orjson-3.8.2-cp38-none-win_amd64.whl", hash = "sha256:b14765ea5aabfeab1a194abfaa0be62c9fee6480a75ac8c6974b4eeede3340b4"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6068a27d59d989d4f2864c2fc3440eb7126a0cfdfaf8a4ad136b0ffd932026ae"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bf36fa759a1b941fc552ad76b2d7fb10c1d2a20c056be291ea45eb6ae1da09b"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f436132e62e647880ca6988974c8e3165a091cb75cbed6c6fd93e931630c22fa"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ecd8936259a5920b52a99faf62d4efeb9f5e25a0aacf0cce1e9fa7c37af154f"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13114b345cda33644f64e92fe5d8737828766cf02fbbc7d28271a95ea546832"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6e43cdc3ddf96bdb751b748b1984b701125abacca8fc2226b808d203916e8cba"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ee39071da2026b11e4352d6fc3608a7b27ee14bc699fd240f4e604770bc7a255"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1c3833976ebbeb3b5b6298cb22e23bf18453f6b80802103b7d08f7dd8a61611d"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b9a34519d3d70935e1cd3797fbed8fbb6f61025182bea0140ca84d95b6f8fbe5"}, - {file = "orjson-3.8.2-cp39-none-win_amd64.whl", hash = "sha256:2734086d9a3dd9591c4be7d05aff9beccc086796d3f243685e56b7973ebac5bc"}, - {file = "orjson-3.8.2.tar.gz", hash = "sha256:a2fb95a45031ccf278e44341027b3035ab99caa32aa173279b1f0a06324f434b"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, + {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, + {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, + {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, + 
{file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, + {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, + {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, + {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, @@ -1438,2 +1433,2 @@ pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, + {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, + {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index aa66597c..feb2441b 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12,2 +12,2 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl", develop = false } -python = "3.9.6" +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } +python = "3.9.15" diff --git a/tools/PythonAudit.mk b/tools/PythonAudit.mk index c7f2f6f2..831c9274 100644 --- a/tools/PythonAudit.mk +++ b/tools/PythonAudit.mk @@ -3 +3 @@ pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d')" + bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d')" diff --git a/workers/datasets_based/.python-version b/workers/datasets_based/.python-version index 
1635d0f5..b326afbc 100644 --- a/workers/datasets_based/.python-version +++ b/workers/datasets_based/.python-version @@ -1 +1 @@ -3.9.6 +3.9.15 diff --git a/workers/datasets_based/Dockerfile b/workers/datasets_based/Dockerfile index 984b9004..bb7850f1 100644 --- a/workers/datasets_based/Dockerfile +++ b/workers/datasets_based/Dockerfile @@ -3 +3 @@ -FROM python:3.9.6-slim +FROM python:3.9.15-slim diff --git a/workers/datasets_based/Makefile b/workers/datasets_based/Makefile index 2b78f800..482f2396 100644 --- a/workers/datasets_based/Makefile +++ b/workers/datasets_based/Makefile @@ -19 +19 @@ pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^libcommon @/,+1 d')" + bash -c "poetry run pip-audit --ignore-vuln GHSA-47fc-vmwq-366v -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+109 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" @@ -20,0 +21 @@ pip-audit: +# ^ we also ignore GHSA-47fc-vmwq-366v vulnerability because it has no fix for the moment diff --git a/workers/datasets_based/poetry.lock b/workers/datasets_based/poetry.lock index d34ad3d3..00104cf7 100644 --- a/workers/datasets_based/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -496 +496 @@ name = "filelock" -version = "3.8.0" +version = "3.8.1" @@ -503,2 +503,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] @@ -617 +617 @@ name = "google-auth" -version = "2.14.1" +version = "2.15.0" @@ -832,0 +833 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" + @@ -876 +877 @@ name = "libcommon" -version = "0.5.0" +version = "0.5.3" @@ -880 +881 @@ optional = false -python-versions = "==3.9.6" +python-versions = "==3.9.15" @@ -894 +895 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl" @@ -1041 +1042 @@ name = "multidict" -version = "6.0.2" +version = "6.0.3" @@ -1207 +1208 @@ name = "orjson" -version = "3.8.2" +version = "3.8.3" @@ -1251,0 +1253,11 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] +[[package]] +name = "pandas-stubs" +version = "1.5.2.221124" +description = "Type annotations for pandas" +category = "dev" +optional = false +python-versions = ">=3.8,<3.12" + +[package.dependencies] +types-pytz = ">=2022.1.1" + @@ -2176 +2188 @@ name = "transformers" -version = "4.24.0" +version = "4.25.1" @@ -2195 +2207 @@ accelerate = ["accelerate (>=0.10.0)"] -all = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] +all = ["Pillow", "accelerate (>=0.10.0)", 
"codecarbon (==1.2.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] @@ -2200,2 +2212,2 @@ deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.10.0)", "beautifuls -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] +dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "timm", 
"tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] @@ -2203 +2215 @@ dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)" -docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] +docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] @@ -2211,0 +2224 @@ modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.4)"] @@ -2225,2 +2238,2 @@ testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (==22.3)", "cookiecut -tf = ["onnxconverter-common", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx"] -tf-cpu = ["onnxconverter-common", "tensorflow-cpu (>=2.3)", "tensorflow-text", "tf2onnx"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] @@ -2276,0 +2290,8 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=5.2,<6.0)", "isort (>=5.0.6,<6. 
+[[package]] +name = "types-pytz" +version = "2022.6.0.1" +description = "Typing stubs for pytz" +category = "dev" +optional = false +python-versions = "*" + @@ -2306 +2327 @@ name = "ujson" -version = "5.5.0" +version = "5.6.0" @@ -2384 +2405 @@ name = "yarl" -version = "1.8.1" +version = "1.8.2" @@ -2422,2 +2443,2 @@ lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "4936f0f9a97645d2ec0deb9125e8db4ab7ec97254658f2de765b7fd537d544de" +python-versions = "3.9.15" +content-hash = "725e9a80c187974440610ef2c87ae0d2505dc02d8605baa8e889a0b62d3fdb97" @@ -3006,2 +3027,2 @@ filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.8.1-py3-none-any.whl", hash = "sha256:3156639b1454b5f828255abf5710f7fc1e10dac69bde3e09e6189b29a91f2505"}, + {file = "filelock-3.8.1.tar.gz", hash = "sha256:9255d3cd8de8fcb2a441444f7a4f1949ae826da36cd070dc3e0c883614b4bbad"}, @@ -3114,2 +3135,2 @@ google-auth = [ - {file = "google-auth-2.14.1.tar.gz", hash = "sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d"}, - {file = "google_auth-2.14.1-py2.py3-none-any.whl", hash = "sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016"}, + {file = "google-auth-2.15.0.tar.gz", hash = "sha256:72f12a6cfc968d754d7bdab369c5c5c16032106e52d32c6dfd8484e4c01a6d1f"}, + {file = "google_auth-2.15.0-py2.py3-none-any.whl", hash = "sha256:6897b93556d8d807ad70701bb89f000183aea366ca7ed94680828b37437a4994"}, @@ -3259 +3280 @@ libcommon = [ - {file = "libcommon-0.5.0-py3-none-any.whl", hash = "sha256:d6e6e8d7c500846e202eccb9e106202e07a40d80b81900369d59d437d7750784"}, + {file = "libcommon-0.5.3-py3-none-any.whl", hash = "sha256:bd80da9b2b320d8e0cf9339f89c4b64e8898e3a14e60ebec21cfee667e0cae94"}, @@ -3492,59 +3513,74 @@ multidict = [ - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"}, - {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"}, - {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"}, - {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"}, - {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"}, - {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"}, - {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"}, - {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"}, - {file = 
"multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"}, - {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"}, - {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, - {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, + {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73009ea04205966d47e16d98686ac5c438af23a1bb30b48a2c5da3423ec9ce37"}, + {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b92a9f3ab904397a33b193000dc4de7318ea175c4c460a1e154c415f9008e3d"}, + {file = "multidict-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:578bfcb16f4b8675ef71b960c00f174b0426e0eeb796bab6737389d8288eb827"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1650ea41c408755da5eed52ac6ccbc8938ccc3e698d81e6f6a1be02ff2a0945"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d52442e7c951e4c9ee591d6047706e66923d248d83958bbf99b8b19515fffaef"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad7d66422b9cc51125509229693d27e18c08f2dea3ac9de408d821932b1b3759"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cd14e61f0da2a2cfb9fe05bfced2a1ed7063ce46a7a8cd473be4973de9a7f91"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:190626ced82d4cc567a09e7346340d380154a493bac6905e0095d8158cdf1e38"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:791458a1f7d1b4ab3bd9e93e0dcd1d59ef7ee9aa051dcd1ea030e62e49b923fd"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b46e79a9f4db53897d17bc64a39d1c7c2be3e3d4f8dba6d6730a2b13ddf0f986"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e4a095e18847c12ec20e55326ab8782d9c2d599400a3a2f174fab4796875d0e2"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3541882266247c7cd3dba78d6ef28dbe704774df60c9e4231edaa4493522e614"}, + {file = "multidict-6.0.3-cp310-cp310-win32.whl", hash = "sha256:67090b17a0a5be5704fd109f231ee73cefb1b3802d41288d6378b5df46ae89ba"}, + {file = "multidict-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:36df958b15639e40472adaa4f0c2c7828fe680f894a6b48c4ce229f59a6a798b"}, + {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b51969503709415a35754954c2763f536a70b8bf7360322b2edb0c0a44391f6"}, + {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24e8d513bfcaadc1f8b0ebece3ff50961951c54b07d5a775008a882966102418"}, + {file = "multidict-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d325d61cac602976a5d47b19eaa7d04e3daf4efce2164c630219885087234102"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbbe17f8a7211b623502d2bf41022a51da3025142401417c765bf9a56fed4c"}, + {file = 
"multidict-6.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3fe591956d8841882c463f934c9f7485cfd5f763a08c0d467b513dc18ef89"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1925f78a543b94c3d46274c66a366fee8a263747060220ed0188e5f3eeea1c0"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e1ce0b187c4e93112304dcde2aa18922fdbe8fb4f13d8aa72a5657bce0563a"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e07c24018986fb00d6e7eafca8fcd6e05095649e17fcf0e33a592caaa62a78b9"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:114a4ab3e5cfbc56c4b6697686ecb92376c7e8c56893ef20547921552f8bdf57"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ccf55f28066b4f08666764a957c2b7c241c7547b0921d69c7ceab5f74fe1a45"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:9d359b0a962e052b713647ac1f13eabf2263167b149ed1e27d5c579f5c8c7d2c"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df7b4cee3ff31b3335aba602f8d70dbc641e5b7164b1e9565570c9d3c536a438"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ee9b1cae9a6c5d023e5a150f6f6b9dbb3c3bbc7887d6ee07d4c0ecb49a473734"}, + {file = "multidict-6.0.3-cp311-cp311-win32.whl", hash = "sha256:960ce1b790952916e682093788696ef7e33ac6a97482f9b983abdc293091b531"}, + {file = "multidict-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:2b66d61966b12e6bba500e5cbb2c721a35e119c30ee02495c5629bd0e91eea30"}, + {file = "multidict-6.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:526f8397fc124674b8f39748680a0ff673bd6a715fecb4866716d36e380f015f"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d5129a937af4e3c4a1d6c139f4051b7d17d43276cefdd8d442a7031f7eef2"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38d394814b39be1c36ac709006d39d50d72a884f9551acd9c8cc1ffae3fc8c4e"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99341ca1f1db9e7f47914cb2461305665a662383765ced6f843712564766956d"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5790cc603456b6dcf8a9a4765f666895a6afddc88b3d3ba7b53dea2b6e23116"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce8e51774eb03844588d3c279adb94efcd0edeccd2f97516623292445bcc01f9"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:baa96a3418e27d723064854143b2f414a422c84cc87285a71558722049bebc5a"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cb4a08f0aaaa869f189ffea0e17b86ad0237b51116d494da15ef7991ee6ad2d7"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:62db44727d0befea68e8ad2881bb87a9cfb6b87d45dd78609009627167f37b69"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:4cc5c8cd205a9810d16a5cd428cd81bac554ad1477cb87f4ad722b10992e794d"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000"}, + {file = "multidict-6.0.3-cp37-cp37m-win32.whl", hash = 
"sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253"}, + {file = "multidict-6.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c2a1168e5aa7c72499fb03c850e0f03f624fa4a5c8d2e215c518d0a73872eb64"}, + {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eddf604a3de2ace3d9a4e4d491be7562a1ac095a0a1c95a9ec5781ef0273ef11"}, + {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d09daf5c6ce7fc6ed444c9339bbde5ea84e2534d1ca1cd37b60f365c77f00dea"}, + {file = "multidict-6.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:12e0d396faa6dc55ff5379eee54d1df3b508243ff15bfc8295a6ec7a4483a335"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70740c2bc9ab1c99f7cdcb104f27d16c63860c56d51c5bf0ef82fc1d892a2131"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e322c94596054352f5a02771eec71563c018b15699b961aba14d6dd943367022"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4159fc1ec9ede8ab93382e0d6ba9b1b3d23c72da39a834db7a116986605c7ab4"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47defc0218682281a52fb1f6346ebb8b68b17538163a89ea24dfe4da37a8a9a3"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f9511e48bde6b995825e8d35e434fc96296cf07a25f4aae24ff9162be7eaa46"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bce9f7c30e7e3a9e683f670314c0144e8d34be6b7019e40604763bd278d84f"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8230a39bae6c2e8a09e4da6bace5064693b00590a4a213e38f9a9366da10e7dd"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:445c0851a1cbc1f2ec3b40bc22f9c4a235edb3c9a0906122a9df6ea8d51f886c"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9aac6881454a750554ed4b280a839dcf9e2133a9d12ab4d417d673fb102289b7"}, + {file = "multidict-6.0.3-cp38-cp38-win32.whl", hash = "sha256:81c3d597591b0940e04949e4e4f79359b2d2e542a686ba0da5e25de33fec13e0"}, + {file = "multidict-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:dc4cfef5d899f5f1a15f3d2ac49f71107a01a5a2745b4dd53fa0cede1419385a"}, + {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d408172519049e36fb6d29672f060dc8461fc7174eba9883c7026041ef9bfb38"}, + {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e068dfeadbce63072b2d8096486713d04db4946aad0a0f849bd4fc300799d0d3"}, + {file = "multidict-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8b817d4ed68fd568ec5e45dd75ddf30cc72a47a6b41b74d5bb211374c296f5e"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf5d19e12eff855aa198259c0b02fd3f5d07e1291fbd20279c37b3b0e6c9852"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5a811aab1b4aea0b4be669363c19847a8c547510f0e18fb632956369fdbdf67"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cfda34b7cb99eacada2072e0f69c0ad3285cb6f8e480b11f2b6d6c1c6f92718"}, + {file = 
"multidict-6.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beeca903e4270b4afcd114f371a9602240dc143f9e944edfea00f8d4ad56c40d"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd5771e8ea325f85cbb361ddbdeb9ae424a68e5dfb6eea786afdcd22e68a7d5d"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9dbab2a7e9c073bc9538824a01f5ed689194db7f55f2b8102766873e906a6c1a"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f2c0957b3e8c66c10d27272709a5299ab3670a0f187c9428f3b90d267119aedb"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:94cbe5535ef150546b8321aebea22862a3284da51e7b55f6f95b7d73e96d90ee"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0e798b072cf2aab9daceb43d97c9c527a0c7593e67a7846ad4cc6051de1e303"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a27b029caa3b555a4f3da54bc1e718eb55fcf1a11fda8bf0132147b476cf4c08"}, + {file = "multidict-6.0.3-cp39-cp39-win32.whl", hash = "sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce"}, + {file = "multidict-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5e58ec0375803526d395f6f7e730ecc45d06e15f68f7b9cdbf644a2918324e51"}, + {file = "multidict-6.0.3.tar.gz", hash = "sha256:2523a29006c034687eccd3ee70093a697129a3ffe8732535d3b2df6a4ecc279d"}, @@ -3664,49 +3700,44 @@ orjson = [ - {file = "orjson-3.8.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:43e69b360c2851b45c7dbab3b95f7fa8469df73fab325a683f7389c4db63aa71"}, - {file = "orjson-3.8.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:64c5da5c9679ef3d85e9bbcbb62f4ccdc1f1975780caa20f2ec1e37b4da6bd36"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c632a2157fa9ec098d655287e9e44809615af99837c49f53d96bfbca453c5bd"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f63da6309c282a2b58d4a846f0717f6440356b4872838b9871dc843ed1fe2b38"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9be25c313ba2d5478829d949165445c3bd36c62e07092b4ba8dbe5426574d1"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4bcce53e9e088f82633f784f79551fcd7637943ab56c51654aaf9d4c1d5cfa54"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:33edb5379c6e6337f9383c85fe4080ce3aa1057cc2ce29345b7239461f50cbd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:da35d347115758bbc8bfaf39bb213c42000f2a54e3f504c84374041d20835cd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d755d94a90a941b91b4d39a6b02e289d8ba358af2d1a911edf266be7942609dc"}, - {file = "orjson-3.8.2-cp310-none-win_amd64.whl", hash = "sha256:7ea96923e26390b2142602ebb030e2a4db9351134696e0b219e5106bddf9b48e"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:a0d89de876e6f1cef917a2338378a60a98584e1c2e1c67781e20b6ed1c512478"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8d47e7592fe938aec898eb22ea4946298c018133df084bc78442ff18e2c6347c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c3d9f1043f618d0c64228aab9711e5bd822253c50b6c56223951e32b51f81d62"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed10600e8b08f1e87b656ad38ab316191ce94f2c9adec57035680c0dc9e93c81"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99c49e49a04bf61fee7aaea6d92ac2b1fcf6507aea894bbdf3fbb25fe792168c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1463674f8efe6984902473d7b5ce3edf444c1fcd09dc8aa4779638a28fb9ca01"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c1ef75f1d021d817e5c60a42da0b4b7e3123b1b37415260b8415666ddacc7cd7"}, - {file = "orjson-3.8.2-cp311-none-win_amd64.whl", hash = "sha256:b6007e1ac8564b13b2521720929e8bb3ccd3293d9fdf38f28728dcc06db6248f"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a02c13ae523221576b001071354380e277346722cc6b7fdaacb0fd6db5154b3e"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fa2e565cf8ffdb37ce1887bd1592709ada7f701e61aa4b1e710be94b0aecbab4"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d8864288f7c5fccc07b43394f83b721ddc999f25dccfb5d0651671a76023f5"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1874c05d0bb994601fa2d51605cb910d09343c6ebd36e84a573293523fab772a"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349387ed6989e5db22e08c9af8d7ca14240803edc50de451d48d41a0e7be30f6"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4e42b19619d6e97e201053b865ca4e62a48da71165f4081508ada8e1b91c6a30"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:bc112c17e607c59d1501e72afb44226fa53d947d364aed053f0c82d153e29616"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6fda669211f2ed1fc2c8130187ec90c96b4f77b6a250004e666d2ef8ed524e5f"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aebd4e80fea0f20578fd0452908b9206a6a0d5ae9f5c99b6e665bbcd989e56cd"}, - {file = "orjson-3.8.2-cp37-none-win_amd64.whl", hash = "sha256:9f3cd0394eb6d265beb2a1572b5663bc910883ddbb5cdfbcb660f5a0444e7fd8"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:74e7d54d11b3da42558d69a23bf92c2c48fabf69b38432d5eee2c5b09cd4c433"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8cbadc9be748a823f9c743c7631b1ee95d3925a9c0b21de4e862a1d57daa10ec"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07d5a8c69a2947d9554a00302734fe3d8516415c8b280963c92bc1033477890"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b364ea01d1b71b9f97bf97af9eb79ebee892df302e127a9e2e4f8eaa74d6b98"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b98a8c825a59db94fbe8e0cce48618624c5a6fb1436467322d90667c08a0bf80"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ab63103f60b516c0fce9b62cb4773f689a82ab56e19ef2387b5a3182f80c0d78"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:73ab3f4288389381ae33ab99f914423b69570c88d626d686764634d5e0eeb909"}, - {file = 
"orjson-3.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ab3fd8728e12c36e20c6d9d70c9e15033374682ce5acb6ed6a08a80dacd254d"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cde11822cf71a7f0daaa84223249b2696a2b6cda7fa587e9fd762dff1a8848e4"}, - {file = "orjson-3.8.2-cp38-none-win_amd64.whl", hash = "sha256:b14765ea5aabfeab1a194abfaa0be62c9fee6480a75ac8c6974b4eeede3340b4"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6068a27d59d989d4f2864c2fc3440eb7126a0cfdfaf8a4ad136b0ffd932026ae"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bf36fa759a1b941fc552ad76b2d7fb10c1d2a20c056be291ea45eb6ae1da09b"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f436132e62e647880ca6988974c8e3165a091cb75cbed6c6fd93e931630c22fa"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ecd8936259a5920b52a99faf62d4efeb9f5e25a0aacf0cce1e9fa7c37af154f"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13114b345cda33644f64e92fe5d8737828766cf02fbbc7d28271a95ea546832"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6e43cdc3ddf96bdb751b748b1984b701125abacca8fc2226b808d203916e8cba"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ee39071da2026b11e4352d6fc3608a7b27ee14bc699fd240f4e604770bc7a255"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1c3833976ebbeb3b5b6298cb22e23bf18453f6b80802103b7d08f7dd8a61611d"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b9a34519d3d70935e1cd3797fbed8fbb6f61025182bea0140ca84d95b6f8fbe5"}, - {file = "orjson-3.8.2-cp39-none-win_amd64.whl", hash = "sha256:2734086d9a3dd9591c4be7d05aff9beccc086796d3f243685e56b7973ebac5bc"}, - {file = "orjson-3.8.2.tar.gz", hash = "sha256:a2fb95a45031ccf278e44341027b3035ab99caa32aa173279b1f0a06324f434b"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bf425bba42a8cee49d611ddd50b7fea9e87787e77bf90b2cb9742293f319480"}, + {file = "orjson-3.8.3-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:068febdc7e10655a68a381d2db714d0a90ce46dc81519a4962521a0af07697fb"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46241e63df2d39f4b7d44e2ff2becfb6646052b963afb1a99f4ef8c2a31aba0"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:961bc1dcbc3a89b52e8979194b3043e7d28ffc979187e46ad23efa8ada612d04"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ea3336c2bda31bc938785b84283118dec52eb90a2946b140054873946f60a4"}, + {file = "orjson-3.8.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:83891e9c3a172841f63cae75ff9ce78f12e4c2c5161baec7af725b1d71d4de21"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b587ec06ab7dd4fb5acf50af98314487b7d56d6e1a7f05d49d8367e0e0b23bc"}, + {file = "orjson-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37196a7f2219508c6d944d7d5ea0000a226818787dadbbed309bfa6174f0402b"}, + {file = "orjson-3.8.3-cp310-none-win_amd64.whl", hash = "sha256:94bd4295fadea984b6284dc55f7d1ea828240057f3b6a1d8ec3fe4d1ea596964"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = 
"sha256:8fe6188ea2a1165280b4ff5fab92753b2007665804e8214be3d00d0b83b5764e"}, + {file = "orjson-3.8.3-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d30d427a1a731157206ddb1e95620925298e4c7c3f93838f53bd19f6069be244"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3497dde5c99dd616554f0dcb694b955a2dc3eb920fe36b150f88ce53e3be2a46"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc29ff612030f3c2e8d7c0bc6c74d18b76dde3726230d892524735498f29f4b2"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1612e08b8254d359f9b72c4a4099d46cdc0f58b574da48472625a0e80222b6e"}, + {file = "orjson-3.8.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:54f3ef512876199d7dacd348a0fc53392c6be15bdf857b2d67fa1b089d561b98"}, + {file = "orjson-3.8.3-cp311-none-win_amd64.whl", hash = "sha256:a30503ee24fc3c59f768501d7a7ded5119a631c79033929a5035a4c91901eac7"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:d746da1260bbe7cb06200813cc40482fb1b0595c4c09c3afffe34cfc408d0a4a"}, + {file = "orjson-3.8.3-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e570fdfa09b84cc7c42a3a6dd22dbd2177cb5f3798feefc430066b260886acae"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca61e6c5a86efb49b790c8e331ff05db6d5ed773dfc9b58667ea3b260971cfb2"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cd0bb7e843ceba759e4d4cc2ca9243d1a878dac42cdcfc2295883fbd5bd2400"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff96c61127550ae25caab325e1f4a4fba2740ca77f8e81640f1b8b575e95f784"}, + {file = "orjson-3.8.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:faf44a709f54cf490a27ccb0fb1cb5a99005c36ff7cb127d222306bf84f5493f"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:194aef99db88b450b0005406f259ad07df545e6c9632f2a64c04986a0faf2c68"}, + {file = "orjson-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa57fe8b32750a64c816840444ec4d1e4310630ecd9d1d7b3db4b45d248b5585"}, + {file = "orjson-3.8.3-cp37-none-win_amd64.whl", hash = "sha256:dbd74d2d3d0b7ac8ca968c3be51d4cfbecec65c6d6f55dabe95e975c234d0338"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef3b4c7931989eb973fbbcc38accf7711d607a2b0ed84817341878ec8effb9c5"}, + {file = "orjson-3.8.3-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:cf3dad7dbf65f78fefca0eb385d606844ea58a64fe908883a32768dfaee0b952"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbdfbd49d58cbaabfa88fcdf9e4f09487acca3d17f144648668ea6ae06cc3183"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f06ef273d8d4101948ebc4262a485737bcfd440fb83dd4b125d3e5f4226117bc"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75de90c34db99c42ee7608ff88320442d3ce17c258203139b5a8b0afb4a9b43b"}, + {file = "orjson-3.8.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:78d69020fa9cf28b363d2494e5f1f10210e8fecf49bf4a767fcffcce7b9d7f58"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b70782258c73913eb6542c04b6556c841247eb92eeace5db2ee2e1d4cb6ffaa5"}, + {file = "orjson-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:989bf5980fc8aca43a9d0a50ea0a0eee81257e812aaceb1e9c0dbd0856fc5230"}, + {file = "orjson-3.8.3-cp38-none-win_amd64.whl", hash = "sha256:52540572c349179e2a7b6a7b98d6e9320e0333533af809359a95f7b57a61c506"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f0ec0ca4e81492569057199e042607090ba48289c4f59f29bbc219282b8dc60"}, + {file = "orjson-3.8.3-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b7018494a7a11bcd04da1173c3a38fa5a866f905c138326504552231824ac9c1"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5870ced447a9fbeb5aeb90f362d9106b80a32f729a57b59c64684dbc9175e92"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0459893746dc80dbfb262a24c08fdba2a737d44d26691e85f27b2223cac8075f"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0379ad4c0246281f136a93ed357e342f24070c7055f00aeff9a69c2352e38d10"}, + {file = "orjson-3.8.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3e9e54ff8c9253d7f01ebc5836a1308d0ebe8e5c2edee620867a49556a158484"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8ff793a3188c21e646219dc5e2c60a74dde25c26de3075f4c2e33cf25835340"}, + {file = "orjson-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b0c13e05da5bc1a6b2e1d3b117cc669e2267ce0a131e94845056d506ef041c6"}, + {file = "orjson-3.8.3-cp39-none-win_amd64.whl", hash = "sha256:4fff44ca121329d62e48582850a247a487e968cfccd5527fab20bd5b650b78c3"}, + {file = "orjson-3.8.3.tar.gz", hash = "sha256:eda1534a5289168614f21422861cbfb1abb8a82d66c00a8ba823d863c0797178"}, @@ -3750,0 +3782,4 @@ pandas = [ +pandas-stubs = [ + {file = "pandas-stubs-1.5.2.221124.tar.gz", hash = "sha256:d6bab9f373ff3c309bf560065d230a38ce4dcd22368be393fad6eb353d102b7c"}, + {file = "pandas_stubs-1.5.2.221124-py3-none-any.whl", hash = "sha256:5a2c47a0cf8e12e113d760d5da9c48daa2b977b14a4c368b8bbff27dbfcfd2bb"}, +] @@ -4820,2 +4855,2 @@ transformers = [ - {file = "transformers-4.24.0-py3-none-any.whl", hash = "sha256:b7ab50039ef9bf817eff14ab974f306fd20a72350bdc9df3a858fd009419322e"}, - {file = "transformers-4.24.0.tar.gz", hash = "sha256:486f353a8e594002e48be0e2aba723d96eda839e63bfe274702a4b5eda85559b"}, + {file = "transformers-4.25.1-py3-none-any.whl", hash = "sha256:60f1be15e17e4a54373c787c713ec149dabcc63464131ac45611618fe7c2016e"}, + {file = "transformers-4.25.1.tar.gz", hash = "sha256:6dad398b792d45dc04e9ee7e9e06bf758ab19dca2efc119065e661bb0f8f843b"}, @@ -4859,0 +4895,4 @@ typer = [ +types-pytz = [ + {file = "types-pytz-2022.6.0.1.tar.gz", hash = "sha256:d078196374d1277e9f9984d49373ea043cf2c64d5d5c491fbc86c258557bd46f"}, + {file = "types_pytz-2022.6.0.1-py3-none-any.whl", hash = "sha256:bea605ce5d5a5d52a8e1afd7656c9b42476e18a0f888de6be91587355313ddf4"}, +] @@ -4873,65 +4912,65 @@ ujson = [ - {file = "ujson-5.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff4928dc1e9704b567171c16787238201fdbf023665573c12c02146fe1e02eec"}, - {file = "ujson-5.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1dc2f46c31ef22b0aaa28cd71be897bea271e700636658d573df9c43c49ebbd0"}, - {file = "ujson-5.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6019e3480d933d3698f2ecb4b46d64bfadd64e718f04fac36e681f3254b49a93"}, - {file = 
"ujson-5.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5179088ef6487c475604b7898731a6ddeeada7702cfb2162155b016703a8475"}, - {file = "ujson-5.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c04ae27e076d81a3839047d8eed57c1e17e361640616fd520d752375e3ba8f0c"}, - {file = "ujson-5.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:60a4b481978ea2aad8fe8af1ecc271624d01b3cf4b09e9b643dd2fe19c07634c"}, - {file = "ujson-5.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a09d203983104918c62f2eef9406f24c355511f9217967df23e70fa7f5b54ff"}, - {file = "ujson-5.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b9812638d7aa8ecda2e8e1513fb4da999249603bffab7439a5f8f0bb362b0db"}, - {file = "ujson-5.5.0-cp310-cp310-win32.whl", hash = "sha256:33cd9084fefc74cbacf88c92fd260b61211e00bcde38d640c369e5dc34a2b4e1"}, - {file = "ujson-5.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:765d46f3d5e7a1d48075035e2d1a9164f683e3fccde834ca04602e6c588835bc"}, - {file = "ujson-5.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:278aa9d7cb56435c96d19f5d702e026bcf69f824e24b41e9b52706abd3565837"}, - {file = "ujson-5.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9585892091ae86045135d6a6129a644142d6a51b23e1428bb5de6d10bc0ce0c7"}, - {file = "ujson-5.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cdc46859024501c20ab74ad542cdf2f08b94b5ce384f2f569483fa3ed926d04"}, - {file = "ujson-5.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5bea13c73f36c4346808df3fa806596163a7962b6d28001ca2a391cab856089"}, - {file = "ujson-5.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3f4240d99d55eb97cb012e9adf401f5ed9cd827af0341ac44603832202b0d2"}, - {file = "ujson-5.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d93940664a5ccfd79f72dcb939b0c31a3479889f14f0eb95ec52976f8c0cae7d"}, - {file = "ujson-5.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:880c84ce59f49776cf120f77e7ca04877c97c6887917078dbc369eb47004d7cf"}, - {file = "ujson-5.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:977bf5be704a88d46bf5b228df8b44521b1f3119d741062191608b3a6a38f224"}, - {file = "ujson-5.5.0-cp311-cp311-win32.whl", hash = "sha256:e0b36257dc90194784531c3b922d8d31fb2b4d8e5adfd27aff4eee7174176365"}, - {file = "ujson-5.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:a34a5f034b339f69ef7f6a134c22d04b92e07b6ddc1dd65382e7e4ec65d6437d"}, - {file = "ujson-5.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f26544bc10c83a2ff9aa2e093500c1b473f327faae31fb468d591e5823333376"}, - {file = "ujson-5.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fd797a4837ba10671954e7c09010cec7aca67e09d193f4920a16beea5f66f65"}, - {file = "ujson-5.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d7cfac2547c93389fa303fc0c0eb6698825564e8389c41c9b60009c746207b6"}, - {file = "ujson-5.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4875cafc9a6482c04c7df52a725d1c41beb74913c0ff4ec8f189f1954a2afe9"}, - {file = "ujson-5.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0762a4fdf86e01f3f8d8b6b7158d01fdd870799ff3f402b676e358fcd879e7eb"}, - {file = "ujson-5.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6c7ae6e0778ab9610f5e80e0595957d101ab8de18c32a8c053a19943ef4831d0"}, - {file = 
"ujson-5.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:94874584b733a18b310b0e954d53168e62cd4a0fd9db85b1903f0902a7eb33e8"}, - {file = "ujson-5.5.0-cp37-cp37m-win32.whl", hash = "sha256:3b74467564814fbce322427a5664e6bcc7dae6dbc8acbef76300fe43ca4072ab"}, - {file = "ujson-5.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:59cdcd934385f36e8bd76aedc234371cc75c848d95bdce804ac8aa8744cfeffa"}, - {file = "ujson-5.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2e506ecf89b6b9d304362ccef770831ec242a52c89dab1b4aabf1ab0eb1d5ed6"}, - {file = "ujson-5.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10095160dbe6bba8059ad6677a01da251431f4c68041bf796dcac0956b34f8f7"}, - {file = "ujson-5.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5035bb997d163f346c22abcec75190e7e756a5349e7c708bd3d5fd7066a9a854"}, - {file = "ujson-5.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d12f2d2df195c8c4e49d2cdbad640353a856c62ca2c624d8b47aa33b65a2a2"}, - {file = "ujson-5.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a485117f97312bef45f5d79d2ff97eff4da503b8a04f3691f59d31141686459"}, - {file = "ujson-5.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:21678d7e068707e4d54bdfeb8c250ebc548b51e499aed778b22112ca31a79669"}, - {file = "ujson-5.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a9b1320d8363a42d857fae8065a2174d38217cdd58cd8dc4f48d54e0591271e"}, - {file = "ujson-5.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:701e81e047f5c0cffd4ac828efca68b0bd270c616654966a051e9a5f836b385e"}, - {file = "ujson-5.5.0-cp38-cp38-win32.whl", hash = "sha256:1cef44ea4973344baed3d50a5da4a8843de3a6af7dea7fadf0a594e53ce5892f"}, - {file = "ujson-5.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e510d288e613d6927796dfb728e13e4530fc83b9ccac5888a21f7860486eab21"}, - {file = "ujson-5.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e1135264bcd40965cd35b0869e36952f54825024befdc7a923df9a7d83cfd800"}, - {file = "ujson-5.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:703fd69d9cb21d6ec2086789df9be2cf8140a76ff127050c24007ea8940dcd3b"}, - {file = "ujson-5.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:849f2ff40264152f25589cb48ddb4a43d14db811f841ec73989bfc0c8c4853fa"}, - {file = "ujson-5.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf416a93e1331820c77e3429df26946dbd4fe105e9b487cd2d1b7298b75784a8"}, - {file = "ujson-5.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:593a0f6fb0e186c5ba65465ed6f6215a30d1efa898c25e74de1c8577a1bff6d0"}, - {file = "ujson-5.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c20cc83b0df47129ec6ed8a47fa7dcfc309c5bad029464004162738502568bb"}, - {file = "ujson-5.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6f83be8257b2f2dd6dea5ee62cd28db90584da7a7af1fba77a2102fc7943638a"}, - {file = "ujson-5.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8141f654432cf75144d6103bfac2286b8adf23467201590b173a74535d6be22d"}, - {file = "ujson-5.5.0-cp39-cp39-win32.whl", hash = "sha256:3fe1aea596f9539fc20cd9e52f098c842afc090168824fd4ca9744fe13151a03"}, - {file = "ujson-5.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a655f7b755cfc5c07f2116b6dcf0ba148c89adef9a6d40c1b0f1fada878c4345"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f19f11055ba2961eb39bdb1ff15763a53fca4fa0b5b624da3c7a528e83cdd09c"}, - {file = 
"ujson-5.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d87c817b292efb748f1974f37e8bb8a8772ef92f05f84e507159360814bcc3f"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9681ec4c60d0da590552427d770636d9079038c30b265f507ccde23caa7823"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f63d1ae1ca17bb2c847e298c7bcf084a73d56d434b4c50509fb93a4b4300b0b2"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:95603eff711b8f3b9596e1c961dbeb745a792ba1904141612f194e07edd71e5f"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2d90414e3b4b44b39825049185959488e084ea7fcaf6124afd5c00893938b09d"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7471d4486f23518cff343f1eec6c68d1b977ed74c3e6cc3e1ac896b9b7d68645"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee9a2c9a4b2421e77f8fe33ed0621dea03c66c710707553020b1e32f3afb6240"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a8cb3c8637006c5bd8237ebb5992a76ba06e39988ad5cff2096227443e8fd6a"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d9c89c521dc90c7564358e525f849b93ad1d710553c1491f66b8cce8113bc901"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ab011e3556a9a1d9461bd686870c527327765ed02fe53550531d6609a8a33ff"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:603607f56a0ee84d9cd2c7e9b1d29b18a70684b94ee34f07b9ffe8dc9c8a9f81"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75bef34e69e7effb7b4849e3f830e3174d2cc6ec7273503fdde111c222dc9b3"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abfe83e082c9208891e2158c1b5044a650ecec408b823bf6bf16cd7f8085cafa"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4ef4ab8352861b99bd7fedb1fc6df3ea7f7d5216c789ba6d859e4ea06f1a4c45"}, - {file = "ujson-5.5.0.tar.gz", hash = "sha256:b25077a971c7da47bd6846a912a747f6963776d90720c88603b1b55d81790780"}, + {file = "ujson-5.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e"}, + {file = "ujson-5.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7"}, + {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3"}, + {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e"}, + {file = "ujson-5.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f"}, + {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9"}, + {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b"}, + {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059"}, + {file = "ujson-5.6.0-cp310-cp310-win32.whl", hash = "sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f"}, + {file = "ujson-5.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169"}, + {file = "ujson-5.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56"}, + {file = "ujson-5.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82"}, + {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc"}, + {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6"}, + {file = "ujson-5.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275"}, + {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a"}, + {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c"}, + {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679"}, + {file = "ujson-5.6.0-cp311-cp311-win32.whl", hash = "sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490"}, + {file = "ujson-5.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7"}, + {file = "ujson-5.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e"}, + {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851"}, + {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf"}, + {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085"}, + {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035"}, + {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b"}, + {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458"}, + {file = "ujson-5.6.0-cp37-cp37m-win32.whl", hash = "sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b"}, + {file = "ujson-5.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf"}, + {file = "ujson-5.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a"}, + {file = 
"ujson-5.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847"}, + {file = "ujson-5.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888"}, + {file = "ujson-5.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b"}, + {file = "ujson-5.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555"}, + {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e"}, + {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d"}, + {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522"}, + {file = "ujson-5.6.0-cp38-cp38-win32.whl", hash = "sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e"}, + {file = "ujson-5.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e"}, + {file = "ujson-5.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99"}, + {file = "ujson-5.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2"}, + {file = "ujson-5.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab"}, + {file = "ujson-5.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d"}, + {file = "ujson-5.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983"}, + {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6"}, + {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709"}, + {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c"}, + {file = "ujson-5.6.0-cp39-cp39-win32.whl", hash = "sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490"}, + {file = "ujson-5.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74"}, + {file = "ujson-5.6.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057"}, + {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405"}, + {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60"}, + {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6"}, + {file = 
"ujson-5.6.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5"}, + {file = "ujson-5.6.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77"}, + {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10"}, + {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a"}, + {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804"}, + {file = "ujson-5.6.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7"}, + {file = "ujson-5.6.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570"}, + {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6"}, + {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a"}, + {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f"}, + {file = "ujson-5.6.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765"}, + {file = "ujson-5.6.0.tar.gz", hash = "sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04"}, @@ -5106,59 +5145,74 @@ yarl = [ - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"}, - {file = 
"yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"}, - {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"}, - {file = "yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"}, - {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"}, - {file = "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"}, - {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"}, - {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"}, - {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"}, - {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"}, - {file = 
"yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"}, - {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, + {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, + {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, + {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, + {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, + {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, + {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, + {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, + {file = 
"yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, + {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, + {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, + {file = 
"yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, + {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, + {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, + {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, diff --git a/workers/datasets_based/pyproject.toml b/workers/datasets_based/pyproject.toml index d6539905..cdeca95c 100644 --- a/workers/datasets_based/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.3-py3-none-any.whl", develop = false } @@ -28 +28 @@ pydub = "^0.25.1" -python = "3.9.6" +python = "3.9.15" @@ -45,0 +46 @@ mypy = "0.812" +pandas-stubs = "^1.5.2.221124" diff --git a/workers/datasets_based/tests/fixtures/datasets.py b/workers/datasets_based/tests/fixtures/datasets.py index fde51675..62ce66ef 100644 --- a/workers/datasets_based/tests/fixtures/datasets.py +++ b/workers/datasets_based/tests/fixtures/datasets.py @@ -9 +9 @@ import numpy as np -import pandas as pd # type: ignore +import pandas as pd
080da2efe810cccb401886f6c24e013ce20344af
Sylvain Lesage
2022-12-01T12:29:32
Merge the workers that rely on the datasets library (#656)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index 85b34cd1..5e52189f 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -58,2 +58 @@ jobs: - IMAGE_WORKER_SPLITS: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.workers.splits}}" - IMAGE_WORKER_FIRST_ROWS: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.workers.firstRows}}" + IMAGE_WORKER_DATASETS_BASED: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.workers.datasets_based}}" diff --git a/.github/workflows/w-first_rows-build-docker.yml b/.github/workflows/w-datasets_based-build-docker.yml similarity index 59% rename from .github/workflows/w-first_rows-build-docker.yml rename to .github/workflows/w-datasets_based-build-docker.yml index 41625975..32aa1007 100644 --- a/.github/workflows/w-first_rows-build-docker.yml +++ b/.github/workflows/w-datasets_based-build-docker.yml @@ -4 +4 @@ -name: workers/first_rows +name: workers/datasets_based @@ -9,5 +9,5 @@ on: - - 'workers/first_rows/Dockerfile' - - 'workers/first_rows/src/**' - - 'workers/first_rows/poetry.lock' - - 'workers/first_rows/pyproject.toml' - - '.github/workflows/w-workers/first_rows-build-docker.yml' + - 'workers/datasets_based/Dockerfile' + - 'workers/datasets_based/src/**' + - 'workers/datasets_based/poetry.lock' + - 'workers/datasets_based/pyproject.toml' + - '.github/workflows/w-datasets_based-build-docker.yml' @@ -21 +21 @@ jobs: - project: first_rows + project: datasets_based diff --git a/.github/workflows/w-first_rows.yml b/.github/workflows/w-datasets_based.yml similarity index 70% rename from .github/workflows/w-first_rows.yml rename to .github/workflows/w-datasets_based.yml index fab82e17..beb0c3af 100644 --- a/.github/workflows/w-first_rows.yml +++ b/.github/workflows/w-datasets_based.yml @@ -4 +4 @@ -name: workers/first_rows +name: workers/datasets_based @@ -9,2 +9,2 @@ on: - - 'workers/first_rows/**' - - '.github/workflows/w-first_rows.yml' + - 'workers/datasets_based/**' + - '.github/workflows/w-datasets_based.yml' @@ -19 +19 @@ jobs: - working-directory: workers/first_rows + working-directory: workers/datasets_based @@ -24 +24 @@ jobs: - working-directory: workers/first_rows + working-directory: workers/datasets_based diff --git a/.github/workflows/w-splits-build-docker.yml b/.github/workflows/w-splits-build-docker.yml deleted file mode 100644 index 5a72a6dd..00000000 --- a/.github/workflows/w-splits-build-docker.yml +++ /dev/null @@ -1,24 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -name: workers/splits -on: - workflow_dispatch: - push: - paths: - - 'workers/splits/Dockerfile' - - 'workers/splits/src/**' - - 'workers/splits/poetry.lock' - - 'workers/splits/pyproject.toml' - - '.github/workflows/w-workers/splits-build-docker.yml' - - '.github/workflows/_build_push_docker_hub.yml' - - 'vendors/' -jobs: - docker: - uses: ./.github/workflows/_build_push_docker_hub.yml - with: - directory: workers - project: splits - secrets: - dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }} - dockerhub-password: ${{ secrets.DOCKERHUB_PASSWORD }} diff --git a/.github/workflows/w-splits.yml b/.github/workflows/w-splits.yml deleted file mode 100644 index f95dba9a..00000000 --- a/.github/workflows/w-splits.yml +++ /dev/null @@ -1,25 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -name: workers/splits -on: - workflow_dispatch: - push: - paths: - - 'workers/splits/**' - - '.github/workflows/w-splits.yml' - - '.github/workflows/_quality-python.yml' - - '.github/workflows/_unit-tests-python.yml' - - 'tools/docker-compose-mongo.yml' - - 'vendors/' -jobs: - quality: - uses: ./.github/workflows/_quality-python.yml - with: - working-directory: workers/splits - is-datasets-worker: true - unit-tests: - uses: ./.github/workflows/_unit-tests-python.yml - with: - working-directory: workers/splits - is-datasets-worker: true diff --git a/.vscode/monorepo.code-workspace b/.vscode/monorepo.code-workspace index 7ddb509d..b65964e1 100644 --- a/.vscode/monorepo.code-workspace +++ b/.vscode/monorepo.code-workspace @@ -32,10 +32,2 @@ - "name": "workers/first_rows", - "path": "../workers/first_rows" - }, - { - "name": "workers/parquet", - "path": "../workers/parquet" - }, - { - "name": "workers/splits", - "path": "../workers/splits" + "name": "workers/datasets_based", + "path": "../workers/datasets_based" diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index e9c0b582..90d250ab 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -160 +160 @@ GITHUB_TOKEN=xxx -To install the [worker service](./services/worker) on Mac OS, you can follow the next steps. +To install the [datasets based worker](./workers/datasets_based) on Mac OS, you can follow the next steps. @@ -223 +223 @@ Check that the expected local version of Python is used: -$ cd services/workers +$ cd workers/datasets_based diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 2566e41d..e544e4f7 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12,2 +12 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-5b9a872", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-5b9a872" + "datasets_based": "huggingface/datasets-server-workers-datasets_based:sha-d6a0b1e" diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index c6596d45..a481b6c4 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -8,0 +9,3 @@ + - name: DATASETS_BASED_ENDPOINT + value: "/first-rows" + # ^ hard-coded diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 729932e6..86fb4c18 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -6 +6 @@ - image: {{ .Values.dockerImage.workers.splits }} + image: {{ .Values.dockerImage.workers.datasets_based }} @@ -8,0 +9,3 @@ + - name: DATASETS_BASED_ENDPOINT + value: "/splits" + # ^ hard-coded diff --git a/chart/templates/worker/splits/deployment.yaml b/chart/templates/worker/splits/deployment.yaml index fb1cd610..526b36f6 100644 --- a/chart/templates/worker/splits/deployment.yaml +++ b/chart/templates/worker/splits/deployment.yaml @@ -23,0 +24 @@ spec: + {{ include "initContainerAssets" . 
| nindent 8 }} diff --git a/tools/DockerRemoteImages.mk b/tools/DockerRemoteImages.mk index b28d0ef0..232fb14d 100644 --- a/tools/DockerRemoteImages.mk +++ b/tools/DockerRemoteImages.mk @@ -4,2 +4 @@ export IMAGE_SERVICE_API := $(shell jq -r '.dockerImage.services.api' ${DOCKER_I -export IMAGE_WORKER_SPLITS := $(shell jq -r '.dockerImage.workers.splits' ${DOCKER_IMAGES}) -export IMAGE_WORKER_FIRST_ROWS := $(shell jq -r '.dockerImage.workers.firstRows' ${DOCKER_IMAGES}) +export IMAGE_WORKER_DATASETS_BASED := $(shell jq -r '.dockerImage.workers.datasets_based' ${DOCKER_IMAGES}) diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml index 8cbefbea..396d394f 100644 --- a/tools/docker-compose-datasets-server.yml +++ b/tools/docker-compose-datasets-server.yml @@ -76,2 +76,2 @@ services: - # dockerfile: workers/splits/Dockerfile - image: ${IMAGE_WORKER_SPLITS?IMAGE_WORKER_SPLITS env var must be provided} + # dockerfile: workers/datasets_based/Dockerfile + image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided} @@ -85,0 +86 @@ services: + DATASETS_BASED_ENDPOINT: "/splits" # hard-coded @@ -96,2 +97,2 @@ services: - # dockerfile: workers/first-rows/Dockerfile - image: ${IMAGE_WORKER_FIRST_ROWS?IMAGE_WORKER_FIRST_ROWS env var must be provided} + # dockerfile: workers/datasets_based/Dockerfile + image: ${IMAGE_WORKER_DATASETS_BASED?IMAGE_WORKER_DATASETS_BASED env var must be provided} @@ -105,0 +107 @@ services: + DATASETS_BASED_ENDPOINT: "/first-rows" # hard-coded diff --git a/workers/first_rows/.flake8 b/workers/datasets_based/.flake8 similarity index 100% rename from workers/first_rows/.flake8 rename to workers/datasets_based/.flake8 diff --git a/workers/first_rows/.python-version b/workers/datasets_based/.python-version similarity index 100% rename from workers/first_rows/.python-version rename to workers/datasets_based/.python-version diff --git a/workers/first_rows/Dockerfile b/workers/datasets_based/Dockerfile similarity index 65% rename from workers/first_rows/Dockerfile rename to workers/datasets_based/Dockerfile index 130b4aca..984b9004 100644 --- a/workers/first_rows/Dockerfile +++ b/workers/datasets_based/Dockerfile @@ -27,5 +27,5 @@ COPY libs/libcommon/dist ./libs/libcommon/dist -COPY workers/first_rows/src ./workers/first_rows/src -COPY workers/first_rows/poetry.lock ./workers/first_rows/poetry.lock -COPY workers/first_rows/pyproject.toml ./workers/first_rows/pyproject.toml -COPY vendors ./vendors/ -WORKDIR /src/workers/first_rows/ +COPY workers/datasets_based/src ./workers/datasets_based/src +COPY workers/datasets_based/poetry.lock ./workers/datasets_based/poetry.lock +COPY workers/datasets_based/pyproject.toml ./workers/datasets_based/pyproject.toml +COPY workers/datasets_based/vendors ./workers/datasets_based/vendors/ +WORKDIR /src/workers/datasets_based/ @@ -34 +34 @@ RUN poetry install -ENTRYPOINT ["poetry", "run", "python", "src/first_rows/main.py"] +ENTRYPOINT ["poetry", "run", "python", "src/datasets_based/main.py"] diff --git a/workers/first_rows/Makefile b/workers/datasets_based/Makefile similarity index 86% rename from workers/first_rows/Makefile rename to workers/datasets_based/Makefile index d4914da4..2b78f800 100644 --- a/workers/first_rows/Makefile +++ b/workers/datasets_based/Makefile @@ -2,2 +2,2 @@ -export COMPOSE_PROJECT_NAME := first_rows -export MONGO_PORT := 27041 +export COMPOSE_PROJECT_NAME := datasets_based +export MONGO_PORT := 27040 @@ -15 +15 @@ run: - poetry run python 
src/first_rows/main.py + poetry run python src/datasets_based/main.py diff --git a/workers/first_rows/README.md b/workers/datasets_based/README.md similarity index 75% rename from workers/first_rows/README.md rename to workers/datasets_based/README.md index 7436a5d0..1da8d12a 100644 --- a/workers/first_rows/README.md +++ b/workers/datasets_based/README.md @@ -1 +1 @@ -# Datasets server - first_rows +# Datasets server - worker @@ -3 +3 @@ -> Worker that pre-computes and caches the response to /first-rows +> Worker that pre-computes and caches the response to /splits @@ -8,0 +9,6 @@ The worker can be configured using environment variables. They are grouped by sc +### Datasets based worker + +The same worker is used for different endpoints to reuse shared code and dependencies. But at runtime, the worker is assigned only one endpoint. The endpoint is configured using the `DATASETS_BASED_ENDPOINT` environment variable: + +- `DATASETS_BASED_ENDPOINT`: the endpoint on which the worker will work (pre-compute and cache the response). It can only be `/splits` at the moment. + @@ -11 +17 @@ The worker can be configured using environment variables. They are grouped by sc -Set environment variables to configure the first rows worker (`FIRST_ROWS_` prefix): +Only needed when the `DATASETS_BASED_ENDPOINT` is set to `/first-rows`: set environment variables to configure the first rows worker (`FIRST_ROWS_` prefix): diff --git a/workers/first_rows/poetry.lock b/workers/datasets_based/poetry.lock similarity index 97% rename from workers/first_rows/poetry.lock rename to workers/datasets_based/poetry.lock index 5bf45816..d34ad3d3 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/datasets_based/poetry.lock @@ -352 +352 @@ name = "cyclonedx-python-lib" -version = "3.1.0" +version = "3.1.1" @@ -580 +580 @@ name = "gdown" -version = "4.5.3" +version = "4.5.4" @@ -663 +663 @@ name = "grpcio" -version = "1.50.0" +version = "1.51.1" @@ -669,3 +668,0 @@ python-versions = ">=3.7" -[package.dependencies] -six = ">=1.5.2" - @@ -673 +670 @@ six = ">=1.5.2" -protobuf = ["grpcio-tools (>=1.50.0)"] +protobuf = ["grpcio-tools (>=1.51.1)"] @@ -735 +732 @@ name = "huggingface-hub" -version = "0.11.0" +version = "0.11.1" @@ -836 +832,0 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" - @@ -1305 +1301 @@ name = "pip-audit" -version = "2.4.6" +version = "2.4.7" @@ -1513 +1509 @@ name = "pycryptodomex" -version = "3.15.0" +version = "3.16.0" @@ -2254 +2250 @@ type = "directory" -url = "../../vendors/trec-car-tools/python3" +url = "vendors/trec-car-tools/python3" @@ -2400 +2396 @@ name = "zipp" -version = "3.10.0" +version = "3.11.0" @@ -2427 +2423 @@ python-versions = "3.9.6" -content-hash = "152c9759656a3e25a2a16751584007e49545a1a227320371fec492a39e29ae59" +content-hash = "4936f0f9a97645d2ec0deb9125e8db4ab7ec97254658f2de765b7fd537d544de" @@ -2953,2 +2949,2 @@ cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, + {file = "cyclonedx_python_lib-3.1.1-py3-none-any.whl", hash = "sha256:a03b8f79f23aa95d37180b5d7bca81ef393b569e2d29e02f4817cfe4488e1ba2"}, + {file = "cyclonedx_python_lib-3.1.1.tar.gz", hash = "sha256:48ae942a892e8385f4e0193d2e295a338df9ab864652081406c26f58085d2b35"}, @@ -3106 +3102,2 @@ gdown = [ - {file = "gdown-4.5.3.tar.gz", hash = 
"sha256:6cbf7dd4108588c734aa588131d8e1d52e64f0873870f71f74cbac195f0c60ef"}, + {file = "gdown-4.5.4-py3-none-any.whl", hash = "sha256:99b99d537eb6dee3cfffeafb73e5558c347ca2a97a59864c100e6c0bb5f42a95"}, + {file = "gdown-4.5.4.tar.gz", hash = "sha256:6aff67d1eb22fb3a5aed2b4563794aa3506c72df083f86b1ec493252709ca68f"}, @@ -3130,45 +3127,45 @@ grpcio = [ - {file = "grpcio-1.50.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:906f4d1beb83b3496be91684c47a5d870ee628715227d5d7c54b04a8de802974"}, - {file = "grpcio-1.50.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:2d9fd6e38b16c4d286a01e1776fdf6c7a4123d99ae8d6b3f0b4a03a34bf6ce45"}, - {file = "grpcio-1.50.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:4b123fbb7a777a2fedec684ca0b723d85e1d2379b6032a9a9b7851829ed3ca9a"}, - {file = "grpcio-1.50.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2f77a90ba7b85bfb31329f8eab9d9540da2cf8a302128fb1241d7ea239a5469"}, - {file = "grpcio-1.50.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eea18a878cffc804506d39c6682d71f6b42ec1c151d21865a95fae743fda500"}, - {file = "grpcio-1.50.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b71916fa8f9eb2abd93151fafe12e18cebb302686b924bd4ec39266211da525"}, - {file = "grpcio-1.50.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:95ce51f7a09491fb3da8cf3935005bff19983b77c4e9437ef77235d787b06842"}, - {file = "grpcio-1.50.0-cp310-cp310-win32.whl", hash = "sha256:f7025930039a011ed7d7e7ef95a1cb5f516e23c5a6ecc7947259b67bea8e06ca"}, - {file = "grpcio-1.50.0-cp310-cp310-win_amd64.whl", hash = "sha256:05f7c248e440f538aaad13eee78ef35f0541e73498dd6f832fe284542ac4b298"}, - {file = "grpcio-1.50.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:ca8a2254ab88482936ce941485c1c20cdeaef0efa71a61dbad171ab6758ec998"}, - {file = "grpcio-1.50.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3b611b3de3dfd2c47549ca01abfa9bbb95937eb0ea546ea1d762a335739887be"}, - {file = "grpcio-1.50.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a4cd8cb09d1bc70b3ea37802be484c5ae5a576108bad14728f2516279165dd7"}, - {file = "grpcio-1.50.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:156f8009e36780fab48c979c5605eda646065d4695deea4cfcbcfdd06627ddb6"}, - {file = "grpcio-1.50.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de411d2b030134b642c092e986d21aefb9d26a28bf5a18c47dd08ded411a3bc5"}, - {file = "grpcio-1.50.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d144ad10eeca4c1d1ce930faa105899f86f5d99cecfe0d7224f3c4c76265c15e"}, - {file = "grpcio-1.50.0-cp311-cp311-win32.whl", hash = "sha256:92d7635d1059d40d2ec29c8bf5ec58900120b3ce5150ef7414119430a4b2dd5c"}, - {file = "grpcio-1.50.0-cp311-cp311-win_amd64.whl", hash = "sha256:ce8513aee0af9c159319692bfbf488b718d1793d764798c3d5cff827a09e25ef"}, - {file = "grpcio-1.50.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:8e8999a097ad89b30d584c034929f7c0be280cd7851ac23e9067111167dcbf55"}, - {file = "grpcio-1.50.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:a50a1be449b9e238b9bd43d3857d40edf65df9416dea988929891d92a9f8a778"}, - {file = "grpcio-1.50.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:cf151f97f5f381163912e8952eb5b3afe89dec9ed723d1561d59cabf1e219a35"}, - {file = "grpcio-1.50.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a23d47f2fc7111869f0ff547f771733661ff2818562b04b9ed674fa208e261f4"}, - {file = "grpcio-1.50.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d84d04dec64cc4ed726d07c5d17b73c343c8ddcd6b59c7199c801d6bbb9d9ed1"}, - {file = "grpcio-1.50.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:67dd41a31f6fc5c7db097a5c14a3fa588af54736ffc174af4411d34c4f306f68"}, - {file = "grpcio-1.50.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8d4c8e73bf20fb53fe5a7318e768b9734cf122fe671fcce75654b98ba12dfb75"}, - {file = "grpcio-1.50.0-cp37-cp37m-win32.whl", hash = "sha256:7489dbb901f4fdf7aec8d3753eadd40839c9085967737606d2c35b43074eea24"}, - {file = "grpcio-1.50.0-cp37-cp37m-win_amd64.whl", hash = "sha256:531f8b46f3d3db91d9ef285191825d108090856b3bc86a75b7c3930f16ce432f"}, - {file = "grpcio-1.50.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:d534d169673dd5e6e12fb57cc67664c2641361e1a0885545495e65a7b761b0f4"}, - {file = "grpcio-1.50.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:1d8d02dbb616c0a9260ce587eb751c9c7dc689bc39efa6a88cc4fa3e9c138a7b"}, - {file = "grpcio-1.50.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:baab51dcc4f2aecabf4ed1e2f57bceab240987c8b03533f1cef90890e6502067"}, - {file = "grpcio-1.50.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40838061e24f960b853d7bce85086c8e1b81c6342b1f4c47ff0edd44bbae2722"}, - {file = "grpcio-1.50.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:931e746d0f75b2a5cff0a1197d21827a3a2f400c06bace036762110f19d3d507"}, - {file = "grpcio-1.50.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15f9e6d7f564e8f0776770e6ef32dac172c6f9960c478616c366862933fa08b4"}, - {file = "grpcio-1.50.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a4c23e54f58e016761b576976da6a34d876420b993f45f66a2bfb00363ecc1f9"}, - {file = "grpcio-1.50.0-cp38-cp38-win32.whl", hash = "sha256:3e4244c09cc1b65c286d709658c061f12c61c814be0b7030a2d9966ff02611e0"}, - {file = "grpcio-1.50.0-cp38-cp38-win_amd64.whl", hash = "sha256:8e69aa4e9b7f065f01d3fdcecbe0397895a772d99954bb82eefbb1682d274518"}, - {file = "grpcio-1.50.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:af98d49e56605a2912cf330b4627e5286243242706c3a9fa0bcec6e6f68646fc"}, - {file = "grpcio-1.50.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:080b66253f29e1646ac53ef288c12944b131a2829488ac3bac8f52abb4413c0d"}, - {file = "grpcio-1.50.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ab5d0e3590f0a16cb88de4a3fa78d10eb66a84ca80901eb2c17c1d2c308c230f"}, - {file = "grpcio-1.50.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb11464f480e6103c59d558a3875bd84eed6723f0921290325ebe97262ae1347"}, - {file = "grpcio-1.50.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e07fe0d7ae395897981d16be61f0db9791f482f03fee7d1851fe20ddb4f69c03"}, - {file = "grpcio-1.50.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d75061367a69808ab2e84c960e9dce54749bcc1e44ad3f85deee3a6c75b4ede9"}, - {file = "grpcio-1.50.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ae23daa7eda93c1c49a9ecc316e027ceb99adbad750fbd3a56fa9e4a2ffd5ae0"}, - {file = "grpcio-1.50.0-cp39-cp39-win32.whl", hash = "sha256:177afaa7dba3ab5bfc211a71b90da1b887d441df33732e94e26860b3321434d9"}, - {file = "grpcio-1.50.0-cp39-cp39-win_amd64.whl", hash = "sha256:ea8ccf95e4c7e20419b7827aa5b6da6f02720270686ac63bd3493a651830235c"}, - {file = "grpcio-1.50.0.tar.gz", hash = "sha256:12b479839a5e753580b5e6053571de14006157f2ef9b71f38c56dc9b23b95ad6"}, + {file = "grpcio-1.51.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:cc2bece1737b44d878cc1510ea04469a8073dbbcdd762175168937ae4742dfb3"}, + {file = 
"grpcio-1.51.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:e223a9793522680beae44671b9ed8f6d25bbe5ddf8887e66aebad5e0686049ef"}, + {file = "grpcio-1.51.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:24ac1154c4b2ab4a0c5326a76161547e70664cd2c39ba75f00fc8a2170964ea2"}, + {file = "grpcio-1.51.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4ef09f8997c4be5f3504cefa6b5c6cc3cf648274ce3cede84d4342a35d76db6"}, + {file = "grpcio-1.51.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8a0b77e992c64880e6efbe0086fe54dfc0bbd56f72a92d9e48264dcd2a3db98"}, + {file = "grpcio-1.51.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:eacad297ea60c72dd280d3353d93fb1dcca952ec11de6bb3c49d12a572ba31dd"}, + {file = "grpcio-1.51.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:16c71740640ba3a882f50b01bf58154681d44b51f09a5728180a8fdc66c67bd5"}, + {file = "grpcio-1.51.1-cp310-cp310-win32.whl", hash = "sha256:29cb97d41a4ead83b7bcad23bdb25bdd170b1e2cba16db6d3acbb090bc2de43c"}, + {file = "grpcio-1.51.1-cp310-cp310-win_amd64.whl", hash = "sha256:9ff42c5620b4e4530609e11afefa4a62ca91fa0abb045a8957e509ef84e54d30"}, + {file = "grpcio-1.51.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:bc59f7ba87972ab236f8669d8ca7400f02a0eadf273ca00e02af64d588046f02"}, + {file = "grpcio-1.51.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3c2b3842dcf870912da31a503454a33a697392f60c5e2697c91d133130c2c85d"}, + {file = "grpcio-1.51.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b011674090594f1f3245960ced7386f6af35485a38901f8afee8ad01541dbd"}, + {file = "grpcio-1.51.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d680356a975d9c66a678eb2dde192d5dc427a7994fb977363634e781614f7c"}, + {file = "grpcio-1.51.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:094e64236253590d9d4075665c77b329d707b6fca864dd62b144255e199b4f87"}, + {file = "grpcio-1.51.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:257478300735ce3c98d65a930bbda3db172bd4e00968ba743e6a1154ea6edf10"}, + {file = "grpcio-1.51.1-cp311-cp311-win32.whl", hash = "sha256:5a6ebcdef0ef12005d56d38be30f5156d1cb3373b52e96f147f4a24b0ddb3a9d"}, + {file = "grpcio-1.51.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f9b0023c2c92bebd1be72cdfca23004ea748be1813a66d684d49d67d836adde"}, + {file = "grpcio-1.51.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:cd3baccea2bc5c38aeb14e5b00167bd4e2373a373a5e4d8d850bd193edad150c"}, + {file = "grpcio-1.51.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:17ec9b13cec4a286b9e606b48191e560ca2f3bbdf3986f91e480a95d1582e1a7"}, + {file = "grpcio-1.51.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:fbdbe9a849854fe484c00823f45b7baab159bdd4a46075302281998cb8719df5"}, + {file = "grpcio-1.51.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31bb6bc7ff145e2771c9baf612f4b9ebbc9605ccdc5f3ff3d5553de7fc0e0d79"}, + {file = "grpcio-1.51.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e473525c28251558337b5c1ad3fa969511e42304524a4e404065e165b084c9e4"}, + {file = "grpcio-1.51.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6f0b89967ee11f2b654c23b27086d88ad7bf08c0b3c2a280362f28c3698b2896"}, + {file = "grpcio-1.51.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7942b32a291421460d6a07883033e392167d30724aa84987e6956cd15f1a21b9"}, + {file = "grpcio-1.51.1-cp37-cp37m-win32.whl", hash = "sha256:f96ace1540223f26fbe7c4ebbf8a98e3929a6aa0290c8033d12526847b291c0f"}, + {file = 
"grpcio-1.51.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f1fec3abaf274cdb85bf3878167cfde5ad4a4d97c68421afda95174de85ba813"}, + {file = "grpcio-1.51.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:0e1a9e1b4a23808f1132aa35f968cd8e659f60af3ffd6fb00bcf9a65e7db279f"}, + {file = "grpcio-1.51.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:6df3b63538c362312bc5fa95fb965069c65c3ea91d7ce78ad9c47cab57226f54"}, + {file = "grpcio-1.51.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:172405ca6bdfedd6054c74c62085946e45ad4d9cec9f3c42b4c9a02546c4c7e9"}, + {file = "grpcio-1.51.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506b9b7a4cede87d7219bfb31014d7b471cfc77157da9e820a737ec1ea4b0663"}, + {file = "grpcio-1.51.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb93051331acbb75b49a2a0fd9239c6ba9528f6bdc1dd400ad1cb66cf864292"}, + {file = "grpcio-1.51.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5dca372268c6ab6372d37d6b9f9343e7e5b4bc09779f819f9470cd88b2ece3c3"}, + {file = "grpcio-1.51.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:471d39d3370ca923a316d49c8aac66356cea708a11e647e3bdc3d0b5de4f0a40"}, + {file = "grpcio-1.51.1-cp38-cp38-win32.whl", hash = "sha256:75e29a90dc319f0ad4d87ba6d20083615a00d8276b51512e04ad7452b5c23b04"}, + {file = "grpcio-1.51.1-cp38-cp38-win_amd64.whl", hash = "sha256:f1158bccbb919da42544a4d3af5d9296a3358539ffa01018307337365a9a0c64"}, + {file = "grpcio-1.51.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:59dffade859f157bcc55243714d57b286da6ae16469bf1ac0614d281b5f49b67"}, + {file = "grpcio-1.51.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:dad6533411d033b77f5369eafe87af8583178efd4039c41d7515d3336c53b4f1"}, + {file = "grpcio-1.51.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:4c4423ea38a7825b8fed8934d6d9aeebdf646c97e3c608c3b0bcf23616f33877"}, + {file = "grpcio-1.51.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0dc5354e38e5adf2498312f7241b14c7ce3484eefa0082db4297189dcbe272e6"}, + {file = "grpcio-1.51.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97d67983189e2e45550eac194d6234fc38b8c3b5396c153821f2d906ed46e0ce"}, + {file = "grpcio-1.51.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:538d981818e49b6ed1e9c8d5e5adf29f71c4e334e7d459bf47e9b7abb3c30e09"}, + {file = "grpcio-1.51.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9235dcd5144a83f9ca6f431bd0eccc46b90e2c22fe27b7f7d77cabb2fb515595"}, + {file = "grpcio-1.51.1-cp39-cp39-win32.whl", hash = "sha256:aacb54f7789ede5cbf1d007637f792d3e87f1c9841f57dd51abf89337d1b8472"}, + {file = "grpcio-1.51.1-cp39-cp39-win_amd64.whl", hash = "sha256:2b170eaf51518275c9b6b22ccb59450537c5a8555326fd96ff7391b5dd75303c"}, + {file = "grpcio-1.51.1.tar.gz", hash = "sha256:e6dfc2b6567b1c261739b43d9c59d201c1b89e017afd9e684d85aa7a186c9f7a"}, @@ -3211,2 +3208,2 @@ huggingface-hub = [ - {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, - {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, + {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, + {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, @@ -3262 +3259 @@ libcommon = [ - {file = "libcommon-0.5.0-py3-none-any.whl", hash = 
"sha256:0267504716992f562382ff5029ace87444fd12793f2393f3800921d384a0fd52"}, + {file = "libcommon-0.5.0-py3-none-any.whl", hash = "sha256:d6e6e8d7c500846e202eccb9e106202e07a40d80b81900369d59d437d7750784"}, @@ -3834,2 +3831,2 @@ pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, + {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, + {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, @@ -4037,30 +4034,26 @@ pycryptodomex = [ - {file = "pycryptodomex-3.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6f5b6ba8aefd624834bc177a2ac292734996bb030f9d1b388e7504103b6fcddf"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:4540904c09704b6f831059c0dfb38584acb82cb97b0125cd52688c1f1e3fffa6"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0fadb9f7fa3150577800eef35f62a8a24b9ddf1563ff060d9bd3af22d3952c8c"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fc9bc7a9b79fe5c750fc81a307052f8daabb709bdaabb0fb18fb136b66b653b5"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f8be976cec59b11f011f790b88aca67b4ea2bd286578d0bd3e31bcd19afcd3e4"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:78d9621cf0ea35abf2d38fa2ca6d0634eab6c991a78373498ab149953787e5e5"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-win32.whl", hash = "sha256:b6306403228edde6e289f626a3908a2f7f67c344e712cf7c0a508bab3ad9e381"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:48697790203909fab02a33226fda546604f4e2653f9d47bc5d3eb40879fa7c64"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:18e2ab4813883ae63396c0ffe50b13554b32bb69ec56f0afaf052e7a7ae0d55b"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3709f13ca3852b0b07fc04a2c03b379189232b24007c466be0f605dd4723e9d4"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:191e73bc84a8064ad1874dba0ebadedd7cce4dedee998549518f2c74a003b2e1"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e3164a18348bd53c69b4435ebfb4ac8a4076291ffa2a70b54f0c4b80c7834b1d"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:5676a132169a1c1a3712edf25250722ebc8c9102aa9abd814df063ca8362454f"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:e2b12968522a0358b8917fc7b28865acac002f02f4c4c6020fcb264d76bfd06d"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux1_i686.whl", hash = "sha256:e47bf8776a7e15576887f04314f5228c6527b99946e6638cf2f16da56d260cab"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:996e1ba717077ce1e6d4849af7a1426f38b07b3d173b879e27d5e26d2e958beb"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:65204412d0c6a8e3c41e21e93a5e6054a74fea501afa03046a388cf042e3377a"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:dd452a5af7014e866206d41751886c9b4bf379a339fdf2dbfc7dd16c0fb4f8e0"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:b9279adc16e4b0f590ceff581f53a80179b02cba9056010d733eb4196134a870"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-win32.whl", hash = "sha256:46b3f05f2f7ac7841053da4e0f69616929ca3c42f238c405f6c3df7759ad2780"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-win_amd64.whl", hash = "sha256:8eecdf9cdc7343001d047f951b9cc805cd68cb6cd77b20ea46af5bffc5bd3dfb"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:67e1e6a92151023ccdfcfbc0afb3314ad30080793b4c27956ea06ab1fb9bcd8a"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:c4cb9cb492ea7dcdf222a8d19a1d09002798ea516aeae8877245206d27326d86"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:94c7b60e1f52e1a87715571327baea0733708ab4723346598beca4a3b6879794"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-win32.whl", hash = "sha256:04cc393045a8f19dd110c975e30f38ed7ab3faf21ede415ea67afebd95a22380"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0776bfaf2c48154ab54ea45392847c1283d2fcf64e232e85565f858baedfc1fa"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:463119d7d22d0fc04a0f9122e9d3e6121c6648bcb12a052b51bd1eed1b996aa2"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a07a64709e366c2041cd5cfbca592b43998bf4df88f7b0ca73dca37071ccf1bd"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:35a8f7afe1867118330e2e0e0bf759c409e28557fb1fc2fbb1c6c937297dbe9a"}, - {file = "pycryptodomex-3.15.0.tar.gz", hash = "sha256:7341f1bb2dadb0d1a0047f34c3a58208a92423cdbd3244d998e4b28df5eac0ed"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b3d04c00d777c36972b539fb79958790126847d84ec0129fce1efef250bfe3ce"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e5a670919076b71522c7d567a9043f66f14b202414a63c3a078b5831ae342c03"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:ce338a9703f54b2305a408fc9890eb966b727ce72b69f225898bb4e9d9ed3f1f"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:a1c0ae7123448ecb034c75c713189cb00ebe2d415b11682865b6c54d200d9c93"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-win32.whl", hash = "sha256:8851585ff19871e5d69e1790f4ca5f6fd1699d6b8b14413b472a4c0dbc7ea780"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-win_amd64.whl", hash = "sha256:8dd2d9e3c617d0712ed781a77efd84ea579e76c5f9b2a4bc0b684ebeddf868b2"}, + {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2ad9bb86b355b6104796567dd44c215b3dc953ef2fae5e0bdfb8516731df92cf"}, + {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e25a2f5667d91795f9417cb856f6df724ccdb0cdd5cbadb212ee9bf43946e9f8"}, + {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b0789a8490114a2936ed77c87792cfe77582c829cb43a6d86ede0f9624ba8aa3"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:0da835af786fdd1c9930994c78b23e88d816dc3f99aa977284a21bbc26d19735"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:22aed0868622d95179217c298e37ed7410025c7b29dac236d3230617d1e4ed56"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1619087fb5b31510b0b0b058a54f001a5ffd91e6ffee220d9913064519c6a69d"}, + 
{file = "pycryptodomex-3.16.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:70288d9bfe16b2fd0d20b6c365db614428f1bcde7b20d56e74cf88ade905d9eb"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7993d26dae4d83b8f4ce605bb0aecb8bee330bb3c95475ef06f3694403621e71"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:1cda60207be8c1cf0b84b9138f9e3ca29335013d2b690774a5e94678ff29659a"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:04610536921c1ec7adba158ef570348550c9f3a40bc24be9f8da2ef7ab387981"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-win32.whl", hash = "sha256:daa67f5ebb6fbf1ee9c90decaa06ca7fc88a548864e5e484d52b0920a57fe8a5"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-win_amd64.whl", hash = "sha256:231dc8008cbdd1ae0e34645d4523da2dbc7a88c325f0d4a59635a86ee25b41dd"}, + {file = "pycryptodomex-3.16.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:4dbbe18cc232b5980c7633972ae5417d0df76fe89e7db246eefd17ef4d8e6d7a"}, + {file = "pycryptodomex-3.16.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:893f8a97d533c66cc3a56e60dd3ed40a3494ddb4aafa7e026429a08772f8a849"}, + {file = "pycryptodomex-3.16.0-pp27-pypy_73-win32.whl", hash = "sha256:6a465e4f856d2a4f2a311807030c89166529ccf7ccc65bef398de045d49144b6"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba57ac7861fd2c837cdb33daf822f2a052ff57dd769a2107807f52a36d0e8d38"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f2b971a7b877348a27dcfd0e772a0343fb818df00b74078e91c008632284137d"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e2453162f473c1eae4826eb10cd7bce19b5facac86d17fb5f29a570fde145abd"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0ba28aa97cdd3ff5ed1a4f2b7f5cd04e721166bd75bd2b929e2734433882b583"}, + {file = "pycryptodomex-3.16.0.tar.gz", hash = "sha256:e9ba9d8ed638733c9e95664470b71d624a6def149e2db6cc52c1aca5a6a2df1d"}, @@ -5174,2 +5167,2 @@ zipp = [ - {file = "zipp-3.10.0-py3-none-any.whl", hash = "sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"}, - {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"}, + {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, + {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, diff --git a/workers/first_rows/poetry.toml b/workers/datasets_based/poetry.toml similarity index 100% rename from workers/first_rows/poetry.toml rename to workers/datasets_based/poetry.toml diff --git a/workers/splits/pyproject.toml b/workers/datasets_based/pyproject.toml similarity index 91% rename from workers/splits/pyproject.toml rename to workers/datasets_based/pyproject.toml index 86223966..d6539905 100644 --- a/workers/splits/pyproject.toml +++ b/workers/datasets_based/pyproject.toml @@ -3,2 +3,2 @@ authors = ["Sylvain Lesage <[email protected]>"] -description = "Worker that pre-computes and caches the response to /splits" -name = "splits" +description = "Worker for processing steps that need the datasets library" +name = "datasets_based" @@ -36 +36 @@ transformers = 
"^4.11.3" -trec-car-tools = { path = "../../vendors/trec-car-tools/python3" } +trec-car-tools = { path = "vendors/trec-car-tools/python3" } @@ -64 +64 @@ markers = [ -source = ["splits"] +source = ["datasets_based"] diff --git a/workers/first_rows/src/first_rows/__init__.py b/workers/datasets_based/src/datasets_based/__init__.py similarity index 100% rename from workers/first_rows/src/first_rows/__init__.py rename to workers/datasets_based/src/datasets_based/__init__.py diff --git a/workers/first_rows/src/first_rows/asset.py b/workers/datasets_based/src/datasets_based/asset.py similarity index 100% rename from workers/first_rows/src/first_rows/asset.py rename to workers/datasets_based/src/datasets_based/asset.py diff --git a/workers/first_rows/src/first_rows/config.py b/workers/datasets_based/src/datasets_based/config.py similarity index 87% rename from workers/first_rows/src/first_rows/config.py rename to workers/datasets_based/src/datasets_based/config.py index 997d773e..496df866 100644 --- a/workers/first_rows/src/first_rows/config.py +++ b/workers/datasets_based/src/datasets_based/config.py @@ -15,0 +16,9 @@ from libcommon.config import ( +class DatasetsBasedConfig: + endpoint: str + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("DATASETS_BASED_"): + self.endpoint = env.str(name="ENDPOINT", default="/splits") + + @@ -35,0 +45 @@ class AppConfig: + datasets_based: DatasetsBasedConfig @@ -44,0 +55 @@ class AppConfig: + self.datasets_based = DatasetsBasedConfig() diff --git a/workers/first_rows/src/first_rows/features.py b/workers/datasets_based/src/datasets_based/features.py similarity index 99% rename from workers/first_rows/src/first_rows/features.py rename to workers/datasets_based/src/datasets_based/features.py index b32d70dc..c626ade3 100644 --- a/workers/first_rows/src/first_rows/features.py +++ b/workers/datasets_based/src/datasets_based/features.py @@ -24 +24 @@ from PIL import Image as PILImage # type: ignore -from first_rows.asset import create_audio_files, create_image_file +from datasets_based.asset import create_audio_files, create_image_file diff --git a/workers/datasets_based/src/datasets_based/main.py b/workers/datasets_based/src/datasets_based/main.py new file mode 100644 index 00000000..0020daae --- /dev/null +++ b/workers/datasets_based/src/datasets_based/main.py @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from datasets_based.config import AppConfig +from datasets_based.worker import get_worker + +if __name__ == "__main__": + app_config = AppConfig() + worker = get_worker(app_config) + worker.loop() diff --git a/workers/first_rows/src/first_rows/py.typed b/workers/datasets_based/src/datasets_based/py.typed similarity index 100% rename from workers/first_rows/src/first_rows/py.typed rename to workers/datasets_based/src/datasets_based/py.typed diff --git a/workers/datasets_based/src/datasets_based/worker.py b/workers/datasets_based/src/datasets_based/worker.py new file mode 100644 index 00000000..a76b1c4a --- /dev/null +++ b/workers/datasets_based/src/datasets_based/worker.py @@ -0,0 +1,27 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from typing import Mapping, Type, Union + +from datasets_based.config import AppConfig +from datasets_based.workers.first_rows import FirstRowsWorker +from datasets_based.workers.splits import SplitsWorker + +DatasetsBasedWorker = Union[SplitsWorker, FirstRowsWorker] + + +def get_worker(app_config: AppConfig) -> DatasetsBasedWorker: + """Get the worker for the current environment.""" + + datasets_based_worker_classes: Mapping[str, Type[DatasetsBasedWorker]] = { + "/splits": SplitsWorker, + "/first-rows": FirstRowsWorker, + } + try: + endpoint = app_config.datasets_based.endpoint + worker = datasets_based_worker_classes[endpoint](app_config=app_config, endpoint=endpoint) + except KeyError as e: + raise ValueError( + f"Unknown worker name '{endpoint}'. Available workers are: {list(datasets_based_worker_classes.keys())}" + ) from e + return worker diff --git a/workers/splits/src/splits/py.typed b/workers/datasets_based/src/datasets_based/workers/__init__.py similarity index 100% rename from workers/splits/src/splits/py.typed rename to workers/datasets_based/src/datasets_based/workers/__init__.py diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/datasets_based/src/datasets_based/workers/first_rows.py similarity index 99% rename from workers/first_rows/src/first_rows/worker.py rename to workers/datasets_based/src/datasets_based/workers/first_rows.py index 39cf2b01..001bf5ce 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/datasets_based/src/datasets_based/workers/first_rows.py @@ -26,2 +26,2 @@ from libcommon.worker import ConfigNotFoundError, SplitNotFoundError, Worker -from first_rows.config import AppConfig, CacheConfig, FirstRowsConfig -from first_rows.features import get_cell_value +from datasets_based.config import AppConfig, CacheConfig, FirstRowsConfig +from datasets_based.features import get_cell_value @@ -561 +561 @@ class FirstRowsWorker(Worker): - version=importlib.metadata.version(__package__), + version=importlib.metadata.version(__package__.split(".")[0]), diff --git a/workers/splits/src/splits/worker.py b/workers/datasets_based/src/datasets_based/workers/splits.py similarity index 98% rename from workers/splits/src/splits/worker.py rename to workers/datasets_based/src/datasets_based/workers/splits.py index ffcc97ff..13f1022f 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/datasets_based/src/datasets_based/workers/splits.py @@ -20 +20 @@ from libcommon.worker import Queue, Worker -from splits.config import AppConfig +from datasets_based.config import AppConfig @@ -173 +173 @@ class SplitsWorker(Worker): - version=importlib.metadata.version(__package__), + version=importlib.metadata.version(__package__.split(".")[0]), diff --git a/workers/first_rows/tests/__init__.py b/workers/datasets_based/tests/__init__.py similarity index 100% rename from workers/first_rows/tests/__init__.py rename to workers/datasets_based/tests/__init__.py diff --git a/workers/first_rows/tests/conftest.py b/workers/datasets_based/tests/conftest.py similarity index 92% rename from workers/first_rows/tests/conftest.py rename to workers/datasets_based/tests/conftest.py index 6014ab77..08f13bd4 100644 --- a/workers/first_rows/tests/conftest.py +++ b/workers/datasets_based/tests/conftest.py @@ -6 +6 @@ from pytest import MonkeyPatch, fixture -from first_rows.config import AppConfig +from datasets_based.config import AppConfig @@ -20 +19,0 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): - monkeypatch_session.setenv("COMMON_HF_TOKEN", hf_token) 
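The `get_worker` dispatch above is driven entirely by the `DATASETS_BASED_ENDPOINT` environment variable that `DatasetsBasedConfig` reads (defaulting to `/splits`). A minimal sketch of how it is exercised, mirroring `main.py`; the environment value must be set before `AppConfig()` is constructed, since the config is read at init time:

```python
import os

# Assumption for illustration: select the /first-rows worker via the env var
# read by DatasetsBasedConfig (prefix DATASETS_BASED_, name ENDPOINT).
os.environ["DATASETS_BASED_ENDPOINT"] = "/first-rows"

from datasets_based.config import AppConfig
from datasets_based.worker import get_worker

worker = get_worker(AppConfig())  # resolves to FirstRowsWorker
worker.loop()  # same entry point main.py uses: poll the queue and process jobs
```

An endpoint missing from the mapping (e.g. `/unknown`) raises `ValueError` listing the available workers.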
diff --git a/workers/first_rows/tests/fixtures/__init__.py b/workers/datasets_based/tests/fixtures/__init__.py similarity index 100% rename from workers/first_rows/tests/fixtures/__init__.py rename to workers/datasets_based/tests/fixtures/__init__.py diff --git a/workers/first_rows/tests/fixtures/data/test_image_rgb.jpg b/workers/datasets_based/tests/fixtures/data/test_image_rgb.jpg similarity index 100% rename from workers/first_rows/tests/fixtures/data/test_image_rgb.jpg rename to workers/datasets_based/tests/fixtures/data/test_image_rgb.jpg diff --git a/workers/first_rows/tests/fixtures/datasets.py b/workers/datasets_based/tests/fixtures/datasets.py similarity index 100% rename from workers/first_rows/tests/fixtures/datasets.py rename to workers/datasets_based/tests/fixtures/datasets.py diff --git a/workers/first_rows/tests/fixtures/files.py b/workers/datasets_based/tests/fixtures/files.py similarity index 100% rename from workers/first_rows/tests/fixtures/files.py rename to workers/datasets_based/tests/fixtures/files.py diff --git a/workers/first_rows/tests/fixtures/hub.py b/workers/datasets_based/tests/fixtures/hub.py similarity index 100% rename from workers/first_rows/tests/fixtures/hub.py rename to workers/datasets_based/tests/fixtures/hub.py diff --git a/workers/first_rows/tests/test_features.py b/workers/datasets_based/tests/test_features.py similarity index 99% rename from workers/first_rows/tests/test_features.py rename to workers/datasets_based/tests/test_features.py index 58f46fa1..f41ca73f 100644 --- a/workers/first_rows/tests/test_features.py +++ b/workers/datasets_based/tests/test_features.py @@ -12,2 +12,2 @@ from datasets import Audio, Dataset, Image, Value -from first_rows.config import AppConfig -from first_rows.features import get_cell_value +from datasets_based.config import AppConfig +from datasets_based.features import get_cell_value diff --git a/workers/datasets_based/tests/test_worker.py b/workers/datasets_based/tests/test_worker.py new file mode 100644 index 00000000..db013c1d --- /dev/null +++ b/workers/datasets_based/tests/test_worker.py @@ -0,0 +1,29 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+
+from typing import Optional
+
+import pytest
+
+from datasets_based.config import AppConfig
+from datasets_based.worker import get_worker
+
+
+@pytest.mark.parametrize(
+    "endpoint,expected_worker",
+    [
+        (None, "SplitsWorker"),
+        ("/splits", "SplitsWorker"),
+        ("/first-rows", "FirstRowsWorker"),
+        ("/unknown", None),
+    ],
+)
+def test_get_worker(app_config: AppConfig, endpoint: Optional[str], expected_worker: Optional[str]) -> None:
+    if endpoint is not None:
+        app_config.datasets_based.endpoint = endpoint
+    if expected_worker is None:
+        with pytest.raises(ValueError):
+            get_worker(app_config)
+    else:
+        worker = get_worker(app_config)
+        assert worker.__class__.__name__ == expected_worker
diff --git a/workers/datasets_based/tests/workers/__init__.py b/workers/datasets_based/tests/workers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/workers/first_rows/tests/test_worker.py b/workers/datasets_based/tests/workers/test_first_rows.py
similarity index 98%
rename from workers/first_rows/tests/test_worker.py
rename to workers/datasets_based/tests/workers/test_first_rows.py
index ab1e68ac..f12f7c79 100644
--- a/workers/first_rows/tests/test_worker.py
+++ b/workers/datasets_based/tests/workers/test_first_rows.py
@@ -13,2 +13,2 @@ from libcommon.simple_cache import DoesNotExist, _clean_cache_database, get_resp
-from first_rows.config import AppConfig
-from first_rows.worker import (
+from datasets_based.config import AppConfig
+from datasets_based.workers.first_rows import (
@@ -20 +20 @@ from first_rows.worker import (
-from .fixtures.hub import HubDatasets, get_default_config_split
+from ..fixtures.hub import HubDatasets, get_default_config_split
diff --git a/workers/splits/tests/test_worker.py b/workers/datasets_based/tests/workers/test_splits.py
similarity index 96%
rename from workers/splits/tests/test_worker.py
rename to workers/datasets_based/tests/workers/test_splits.py
index a8da4127..cf335c72 100644
--- a/workers/splits/tests/test_worker.py
+++ b/workers/datasets_based/tests/workers/test_splits.py
@@ -11,2 +11,2 @@ from libcommon.simple_cache import DoesNotExist, _clean_cache_database, get_resp
-from splits.config import AppConfig
-from splits.worker import SplitsWorker, compute_splits_response
+from datasets_based.config import AppConfig
+from datasets_based.workers.splits import SplitsWorker, compute_splits_response
@@ -14 +14 @@ from splits.worker import SplitsWorker, compute_splits_response
-from .fixtures.hub import HubDatasets
+from ..fixtures.hub import HubDatasets
diff --git a/vendors/trec-car-tools/.gitignore b/workers/datasets_based/vendors/trec-car-tools/.gitignore
similarity index 100%
rename from vendors/trec-car-tools/.gitignore
rename to workers/datasets_based/vendors/trec-car-tools/.gitignore
diff --git a/vendors/trec-car-tools/.travis.yml b/workers/datasets_based/vendors/trec-car-tools/.travis.yml
similarity index 100%
rename from vendors/trec-car-tools/.travis.yml
rename to workers/datasets_based/vendors/trec-car-tools/.travis.yml
diff --git a/vendors/trec-car-tools/.travis/test.sh b/workers/datasets_based/vendors/trec-car-tools/.travis/test.sh
similarity index 100%
rename from vendors/trec-car-tools/.travis/test.sh
rename to workers/datasets_based/vendors/trec-car-tools/.travis/test.sh
diff --git a/vendors/trec-car-tools/LICENSE b/workers/datasets_based/vendors/trec-car-tools/LICENSE
similarity index 100%
rename from vendors/trec-car-tools/LICENSE
rename to workers/datasets_based/vendors/trec-car-tools/LICENSE
diff --git a/vendors/trec-car-tools/README.mkd 
b/workers/datasets_based/vendors/trec-car-tools/README.mkd similarity index 100% rename from vendors/trec-car-tools/README.mkd rename to workers/datasets_based/vendors/trec-car-tools/README.mkd diff --git a/vendors/trec-car-tools/python3/Makefile b/workers/datasets_based/vendors/trec-car-tools/python3/Makefile similarity index 100% rename from vendors/trec-car-tools/python3/Makefile rename to workers/datasets_based/vendors/trec-car-tools/python3/Makefile diff --git a/vendors/trec-car-tools/python3/README.mkd b/workers/datasets_based/vendors/trec-car-tools/python3/README.mkd similarity index 100% rename from vendors/trec-car-tools/python3/README.mkd rename to workers/datasets_based/vendors/trec-car-tools/python3/README.mkd diff --git a/vendors/trec-car-tools/python3/annotated_content.py b/workers/datasets_based/vendors/trec-car-tools/python3/annotated_content.py similarity index 100% rename from vendors/trec-car-tools/python3/annotated_content.py rename to workers/datasets_based/vendors/trec-car-tools/python3/annotated_content.py diff --git a/vendors/trec-car-tools/python3/conf.py b/workers/datasets_based/vendors/trec-car-tools/python3/conf.py similarity index 100% rename from vendors/trec-car-tools/python3/conf.py rename to workers/datasets_based/vendors/trec-car-tools/python3/conf.py diff --git a/vendors/trec-car-tools/python3/format_runs_test.py b/workers/datasets_based/vendors/trec-car-tools/python3/format_runs_test.py similarity index 100% rename from vendors/trec-car-tools/python3/format_runs_test.py rename to workers/datasets_based/vendors/trec-car-tools/python3/format_runs_test.py diff --git a/vendors/trec-car-tools/python3/index.rst b/workers/datasets_based/vendors/trec-car-tools/python3/index.rst similarity index 100% rename from vendors/trec-car-tools/python3/index.rst rename to workers/datasets_based/vendors/trec-car-tools/python3/index.rst diff --git a/vendors/trec-car-tools/python3/read_data_test.py b/workers/datasets_based/vendors/trec-car-tools/python3/read_data_test.py similarity index 100% rename from vendors/trec-car-tools/python3/read_data_test.py rename to workers/datasets_based/vendors/trec-car-tools/python3/read_data_test.py diff --git a/vendors/trec-car-tools/python3/requirements.txt b/workers/datasets_based/vendors/trec-car-tools/python3/requirements.txt similarity index 100% rename from vendors/trec-car-tools/python3/requirements.txt rename to workers/datasets_based/vendors/trec-car-tools/python3/requirements.txt diff --git a/vendors/trec-car-tools/python3/setup.py b/workers/datasets_based/vendors/trec-car-tools/python3/setup.py similarity index 100% rename from vendors/trec-car-tools/python3/setup.py rename to workers/datasets_based/vendors/trec-car-tools/python3/setup.py diff --git a/vendors/trec-car-tools/python3/test.py b/workers/datasets_based/vendors/trec-car-tools/python3/test.py similarity index 100% rename from vendors/trec-car-tools/python3/test.py rename to workers/datasets_based/vendors/trec-car-tools/python3/test.py diff --git a/vendors/trec-car-tools/python3/trec_car/__init__.py b/workers/datasets_based/vendors/trec-car-tools/python3/trec_car/__init__.py similarity index 100% rename from vendors/trec-car-tools/python3/trec_car/__init__.py rename to workers/datasets_based/vendors/trec-car-tools/python3/trec_car/__init__.py diff --git a/vendors/trec-car-tools/python3/trec_car/format_runs.py b/workers/datasets_based/vendors/trec-car-tools/python3/trec_car/format_runs.py similarity index 100% rename from 
vendors/trec-car-tools/python3/trec_car/format_runs.py rename to workers/datasets_based/vendors/trec-car-tools/python3/trec_car/format_runs.py diff --git a/vendors/trec-car-tools/python3/trec_car/read_data.py b/workers/datasets_based/vendors/trec-car-tools/python3/trec_car/read_data.py similarity index 100% rename from vendors/trec-car-tools/python3/trec_car/read_data.py rename to workers/datasets_based/vendors/trec-car-tools/python3/trec_car/read_data.py diff --git a/vendors/trec-car-tools/trec-car-tools-example/pom.xml b/workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/pom.xml similarity index 100% rename from vendors/trec-car-tools/trec-car-tools-example/pom.xml rename to workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/pom.xml diff --git a/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarBuildLuceneIndex.java b/workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarBuildLuceneIndex.java similarity index 100% rename from vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarBuildLuceneIndex.java rename to workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarBuildLuceneIndex.java diff --git a/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarQueryLuceneIndex.java b/workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarQueryLuceneIndex.java similarity index 100% rename from vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarQueryLuceneIndex.java rename to workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarQueryLuceneIndex.java diff --git a/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarToolsExample.java b/workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarToolsExample.java similarity index 100% rename from vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarToolsExample.java rename to workers/datasets_based/vendors/trec-car-tools/trec-car-tools-example/src/main/java/edu/unh/cs/TrecCarToolsExample.java diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml deleted file mode 100644 index e8d26e48..00000000 --- a/workers/first_rows/pyproject.toml +++ /dev/null @@ -1,80 +0,0 @@ -[tool.poetry] -authors = ["Sylvain Lesage <[email protected]>"] -description = "Worker that pre-computes and caches the response to /first-rows" -name = "first_rows" -version = "0.0.1" -license = "Apache-2.0" - -[tool.poetry.dependencies] -Pillow = "^9.3.0" -PyICU = "^2.7.4" -aiohttp = "^3.7.4.post0" -apache-beam = "2.41.0" # ^2 gives a InvalidWheelName error because it tries to install 2.42 that has not been released... 
-bs4 = "^0.0.1" -conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "~2.7.1" } -gdown = "^4.2.0" -huggingface-hub = "^0.11.0" -kenlm = { url = "https://github.com/kpu/kenlm/archive/master.zip" } -kss = "^2.6.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl", develop = false } -lm-dataformat = "^0.0.20" -lxml = "^4.9.1" -nlp = "^0.4.0" -nltk = "^3.6.5" -openpyxl = "^3.0.9" -py7zr = "^0.17.4" -pydub = "^0.25.1" -python = "3.9.6" -rarfile = "^4.0" -sklearn = "^0.0" -tensorflow = {version = "^2.9.1", markers = "sys_platform != 'darwin' or platform_machine != 'arm64'"} -tensorflow-macos = {version = "^2.9.1", markers = "sys_platform == 'darwin' and platform_machine == 'arm64'"} -tfrecord = "^1.14.1" -torchaudio = "^0.10.1" -transformers = "^4.11.3" -trec-car-tools = { path = "../../vendors/trec-car-tools/python3" } -typer = "^0.4.0" -wget = "^3.2" - -[tool.poetry.group.dev.dependencies] -bandit = "^1.7.0" -black = "^22.1.0" -flake8 = "^3.9.2" -isort = "^5.9.3" -mypy = "0.812" -poetryup = "^0.3.8" -pytest = "^7.2.0" -pytest-cov = "^2.12.1" -pip-audit = "^2.4.6" -types-requests = "^2.28.11" - -[build-system] -build-backend = "poetry.core.masonry.api" -requires = ["poetry-core>=1.0.0"] - -[tool.pytest.ini_options] -# addopts = "-k 'wip'" -filterwarnings = ["ignore::DeprecationWarning"] -markers = [ - "wip: tests being developed" -] - -[tool.coverage.run] -source = ["first_rows"] - -[tool.isort] -profile = "black" - -[tool.black] -line-length = 119 -preview = true - -[tool.mypy] -exclude = 'vendors' -strict = true - -[[tool.mypy.overrides]] -# could be solved in datasets by adding __add__ to the __init__.py file -implicit_reexport = true -module = "datasets" diff --git a/workers/first_rows/src/first_rows/main.py b/workers/first_rows/src/first_rows/main.py deleted file mode 100644 index 7b154b9d..00000000 --- a/workers/first_rows/src/first_rows/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from first_rows.config import AppConfig -from first_rows.worker import FirstRowsWorker - -if __name__ == "__main__": - app_config = AppConfig() - FIRST_ROWS_ENDPOINT = "/first-rows" - FirstRowsWorker(app_config=app_config, endpoint=FIRST_ROWS_ENDPOINT).loop() diff --git a/workers/splits/.flake8 b/workers/splits/.flake8 deleted file mode 100644 index f7d6157c..00000000 --- a/workers/splits/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -# Recommend matching the black line length (119), -# rather than using the flake8 default of 79: -max-line-length = 119 -extend-ignore = "E203" diff --git a/workers/splits/.python-version b/workers/splits/.python-version deleted file mode 100644 index 1635d0f5..00000000 --- a/workers/splits/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.9.6 diff --git a/workers/splits/Dockerfile b/workers/splits/Dockerfile deleted file mode 100644 index bdc1edf5..00000000 --- a/workers/splits/Dockerfile +++ /dev/null @@ -1,34 +0,0 @@ -# build with -# docker build -t some_tag_worker -f Dockerfile ../.. 
-FROM python:3.9.6-slim - -ENV PYTHONFAULTHANDLER=1 \ - PYTHONUNBUFFERED=1 \ - PYTHONHASHSEED=random \ - PIP_NO_CACHE_DIR=off \ - PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_DEFAULT_TIMEOUT=100 \ - POETRY_NO_INTERACTION=1 \ - # Versions: - POETRY_VERSION=1.2.2 \ - POETRY_VIRTUALENVS_IN_PROJECT=true - -# System deps: -RUN apt-get update \ - && apt-get install -y build-essential unzip wget python3-dev make \ - libicu-dev ffmpeg libavcodec-extra libsndfile1 llvm pkg-config \ - && rm -rf /var/lib/apt/lists/* - -RUN pip install -U --no-cache-dir pip -RUN pip install "poetry==$POETRY_VERSION" - -WORKDIR /src -COPY libs/libcommon/dist ./libs/libcommon/dist -COPY workers/splits/src ./workers/splits/src -COPY workers/splits/poetry.lock ./workers/splits/poetry.lock -COPY workers/splits/pyproject.toml ./workers/splits/pyproject.toml -COPY vendors ./vendors/ -WORKDIR /src/workers/splits/ -RUN poetry install - -ENTRYPOINT ["poetry", "run", "python", "src/splits/main.py"] diff --git a/workers/splits/Makefile b/workers/splits/Makefile deleted file mode 100644 index 7bb6d766..00000000 --- a/workers/splits/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# environment variables for the commands (docker compose, poetry) -export COMPOSE_PROJECT_NAME := splits -export MONGO_PORT := 27040 -export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} -export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT} -# makefile variables -DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml - -include ../../tools/Python.mk -include ../../tools/PythonTest.mk -include ../../tools/Docker.mk - -.PHONY: run -run: - poetry run python src/splits/main.py - -.PHONY: pip-audit -pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" -# ^ we remove problematic lines to have a working pip-audit. See https://github.com/pypa/pip-audit/issues/84#issuecomment-1326203111 for "requests" diff --git a/workers/splits/README.md b/workers/splits/README.md deleted file mode 100644 index 5585339f..00000000 --- a/workers/splits/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Datasets server - worker - -> Worker that pre-computes and caches the response to /splits - -## Configuration - -The worker can be configured using environment variables. They are grouped by scope. - -### Datasets library - -The following environment variables are used to configure two dependencies: the `datasets` and `numba` libraries: - -- `HF_DATASETS_CACHE`: directory where the `datasets` library will store the cached datasets data. Defaults to `~/.cache/huggingface/datasets`. -- `HF_MODULES_CACHE`: directory where the `datasets` library will store the cached datasets scripts. Defaults to `~/.cache/huggingface/modules`. -- `NUMBA_CACHE_DIR`: directory where the `numba` decorators (used by `librosa`) can write cache. Required on cloud infrastructure (see https://stackoverflow.com/a/63367171/7351594). - -If the Hub is not https://huggingface.co (i.e. if you set the `COMMON_HF_ENDPOINT` environment variable), you should also set the `HF_ENDPOINT` environment variable to the same value. See https://github.com/huggingface/datasets/pull/5196 for more details. - -### Common - -See [../../libs/libcommon/README.md](../../libs/libcommon/README.md) for more information about the common configuration. 
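The environment-variable configuration documented in the deleted README above carries over unchanged to the merged `datasets_based` worker. A minimal sketch of how those variables take effect (directory paths are illustrative); they must be exported before `datasets` is imported, because the library reads them at import time:

```python
import os

os.environ["HF_DATASETS_CACHE"] = "/storage/hf/datasets"  # cached datasets data
os.environ["HF_MODULES_CACHE"] = "/storage/hf/modules"    # cached dataset scripts
os.environ["NUMBA_CACHE_DIR"] = "/storage/numba"          # numba (librosa) cache, needed on cloud infra

import datasets  # picks up HF_DATASETS_CACHE at import time

print(datasets.config.HF_DATASETS_CACHE)  # /storage/hf/datasets
```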
diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock deleted file mode 100644 index 5bf45816..00000000 --- a/workers/splits/poetry.lock +++ /dev/null @@ -1,5229 +0,0 @@ -[[package]] -name = "absl-py" -version = "1.3.0" -description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "aiohttp" -version = "3.8.3" -description = "Async http client/server framework (asyncio)" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" -attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "apache-beam" -version = "2.41.0" -description = "Apache Beam SDK for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -cloudpickle = ">=2.1.0,<3" -crcmod = ">=1.7,<2.0" -dill = ">=0.3.1.1,<0.3.2" -fastavro = ">=0.23.6,<2" -grpcio = ">=1.33.1,<2" -hdfs = ">=2.1.0,<3.0.0" -httplib2 = ">=0.8,<0.21.0" -numpy = ">=1.14.3,<1.23.0" -orjson = "<4.0" -proto-plus = ">=1.7.1,<2" -protobuf = ">=3.12.2,<4" -pyarrow = ">=0.15.1,<8.0.0" -pydot = ">=1.2.0,<2" -pymongo = ">=3.8.0,<4.0.0" -python-dateutil = ">=2.8.0,<3" -pytz = ">=2018.3" -requests = ">=2.24.0,<3.0.0" -typing-extensions = ">=3.7.0" - -[package.extras] -aws = ["boto3 (>=1.9)"] -azure = ["azure-core (>=1.7.0)", "azure-storage-blob (>=12.3.2)"] -dataframe = ["pandas (>=1.0,<1.5)"] -docs = ["Sphinx (>=1.5.2,<2.0)", "docutils (==0.17.1)"] -gcp = ["cachetools (>=3.1.0,<5)", "google-api-core (!=2.8.2,<3)", "google-apitools (>=0.5.31,<0.5.32)", "google-auth (>=1.18.0,<3)", "google-auth-httplib2 (>=0.1.0,<0.2.0)", "google-cloud-bigquery (>=1.6.0,<3)", "google-cloud-bigquery-storage (>=2.6.3,<2.14)", "google-cloud-bigtable (>=0.31.1,<2)", "google-cloud-core (>=0.28.1,<3)", "google-cloud-datastore (>=1.8.0,<2)", "google-cloud-dlp (>=3.0.0,<4)", "google-cloud-language (>=1.3.0,<2)", "google-cloud-pubsub (>=2.1.0,<3)", "google-cloud-pubsublite (>=1.2.0,<2)", "google-cloud-recommendations-ai (>=0.1.0,<0.8.0)", "google-cloud-spanner (>=1.13.0,<2)", "google-cloud-videointelligence (>=1.8.0,<2)", "google-cloud-vision (>=0.38.0,<2)", "grpcio-gcp (>=0.2.2,<1)"] -interactive = ["facets-overview (>=1.0.0,<2)", "google-cloud-dataproc (>=3.0.0,<3.2.0)", "ipykernel (>=6,<7)", "ipython (>=7,<8)", "ipython (>=8,<9)", "ipywidgets (>=7.6.5,<8)", "jupyter-client (>=6.1.11,<6.1.13)", "timeloop (>=1.0.2,<2)"] -interactive-test = ["chromedriver-binary (>=100,<101)", "nbconvert (>=6.2.0,<7)", "nbformat (>=5.0.5,<6)", "needle (>=0.5.0,<1)", "pillow (>=7.1.1,<8)"] -test = ["cryptography (>=36.0.0)", "freezegun (>=0.3.12)", "joblib (>=1.0.1)", "mock (>=1.0.1,<3.0.0)", "pandas (<2.0.0)", "parameterized (>=0.7.1,<0.9.0)", "psycopg2-binary (>=2.8.5,<3.0.0)", "pyhamcrest (>=1.9,!=1.10.0,<2.0.0)", "pytest (>=4.4.0,<5.0)", "pytest-timeout (>=1.3.3,<2)", "pytest-xdist (>=1.29.0,<2)", "pyyaml (>=3.12,<7.0.0)", "requests-mock (>=1.7,<2.0)", "scikit-learn (>=0.20.0)", "sqlalchemy (>=1.3,<2.0)", "tenacity (>=5.0.2,<6.0)", "testcontainers[mysql] 
(>=3.0.3,<4.0.0)"] - -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "astunparse" -version = "1.6.3" -description = "An AST unparser for Python" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -six = ">=1.6.1,<2.0" -wheel = ">=0.23.0,<1.0" - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "attrs" -version = "22.1.0" -description = "Classes Without Boilerplate" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "audioread" -version = "3.0.0" -description = "multi-library, cross-platform audio decoding" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "bandit" -version = "1.7.4" -description = "Security oriented static analyser for python code." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] -yaml = ["PyYAML"] - -[[package]] -name = "beautifulsoup4" -version = "4.11.1" -description = "Screen-scraping library" -category = "main" -optional = false -python-versions = ">=3.6.0" - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "black" -version = "22.10.0" -description = "The uncompromising code formatter." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "brotli" -version = "1.0.9" -description = "Python bindings for the Brotli compression library" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "brotlicffi" -version = "1.0.9.2" -description = "Python CFFI bindings to the Brotli library" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -cffi = ">=1.0.0" - -[[package]] -name = "bs4" -version = "0.0.1" -description = "Dummy package for Beautiful Soup" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -beautifulsoup4 = "*" - -[[package]] -name = "cachecontrol" -version = "0.12.11" -description = "httplib2 caching for requests" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -lockfile = {version = ">=0.9", optional = true, markers = "extra == \"filecache\""} -msgpack = ">=0.5.2" -requests = "*" - -[package.extras] -filecache = ["lockfile (>=0.9)"] -redis = ["redis (>=2.10.5)"] - -[[package]] -name = "cachetools" -version = "5.2.0" -description = "Extensible memoizing collections and decorators" -category = "main" -optional = false -python-versions = "~=3.7" - -[[package]] -name = "cbor" -version = "1.0.0" -description = "RFC 7049 - Concise Binary Object Representation" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "certifi" -version = "2022.9.24" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] - -[[package]] -name = "click" -version = "8.1.3" -description = "Composable command line interface toolkit" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "cloudpickle" -version = "2.2.0" -description = "Extended pickling support for Python objects" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" - -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - -[[package]] -name = "conllu" -version = "4.5.2" -description = "CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "coverage" -version = "6.5.0" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "crc32c" -version = "2.3" -description = "A python package implementing the crc32c algorithm in hardware and software" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "crcmod" -version = "1.7" -description = "CRC Generator" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "cyclonedx-python-lib" -version = "3.1.0" -description = "A library for producing CycloneDX SBOM (Software Bill of Materials) files." -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - -[package.dependencies] -packageurl-python = ">=0.9" -setuptools = ">=47.0.0" -sortedcontainers = ">=2.4.0,<3.0.0" -toml = ">=0.10.0,<0.11.0" - -[[package]] -name = "datasets" -version = "2.7.1" -description = "HuggingFace community-driven open-source library of datasets" -category = "main" -optional = false -python-versions = ">=3.7.0" - -[package.dependencies] -aiohttp = "*" -dill = "<0.3.7" -fsspec = {version = ">=2021.11.1", extras = ["http"]} -huggingface-hub = ">=0.2.0,<1.0.0" -librosa = {version = "*", optional = true, markers = "extra == \"audio\""} -multiprocess = "*" -numpy = ">=1.17" -packaging = "*" -pandas = "*" -Pillow = {version = ">=6.2.1", optional = true, markers = "extra == \"vision\""} -pyarrow = ">=6.0.0" -pyyaml = ">=5.1" -requests = ">=2.19.0" -responses = "<0.19" -tqdm = ">=4.62.1" -xxhash = "*" - -[package.extras] -apache-beam = ["apache-beam (>=2.26.0)"] -audio = ["librosa"] -benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "transformers (==3.0.2)"] -dev = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (>=2.0.1)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "black (>=22.0,<23.0)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "fsspec[s3]", "isort (>=5.0.0)", "jiwer", "langdetect", "librosa", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "nltk", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "requests-file (>=1.5.1)", "rouge-score", "s3fs (>=2021.11.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "spacy (>=3.0.0)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio (<0.12.0)", "transformers", "typer (<0.5.0)", "zstandard"] -docs = ["s3fs"] -quality = ["black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] -s3 = ["boto3", "botocore", "fsspec", "s3fs"] -tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] -tensorflow-gpu = 
["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (>=2.0.1)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "fsspec[s3]", "jiwer", "langdetect", "librosa", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "nltk", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "requests-file (>=1.5.1)", "rouge-score", "s3fs (>=2021.11.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "spacy (>=3.0.0)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio (<0.12.0)", "transformers", "typer (<0.5.0)", "zstandard"] -torch = ["torch"] -vision = ["Pillow (>=6.2.1)"] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "dill" -version = "0.3.1.1" -description = "serialize all of python" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*" - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - -[[package]] -name = "dnspython" -version = "1.16.0" -description = "DNS toolkit" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -dnssec = ["ecdsa (>=0.13)", "pycryptodome"] -idna = ["idna (>=2.1)"] - -[[package]] -name = "docopt" -version = "0.6.2" -description = "Pythonic argument parser, that will make you smile" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "environs" -version = "9.5.0" -description = "simplified environment variable parsing" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -marshmallow = ">=3.0.0" -python-dotenv = "*" - -[package.extras] -dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -django = ["dj-database-url", "dj-email-url", "django-cache-url"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] -tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] - -[[package]] -name = "et-xmlfile" -version = "1.1.0" -description = "An implementation of lxml.xmlfile for the standard library" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "exceptiongroup" -version = "1.0.4" -description = "Backport of PEP 654 (exception groups)" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "fastavro" -version = "1.7.0" -description = "Fast read/write of AVRO files" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -codecs = ["lz4", "python-snappy", "zstandard"] -lz4 = ["lz4"] -snappy = ["python-snappy"] -zstandard = ["zstandard"] - -[[package]] -name = "filelock" -version = "3.8.0" -description = "A platform independent file lock." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - -[[package]] -name = "flatbuffers" -version = "22.11.23" -description = "The FlatBuffers serialization format for Python" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "frozenlist" -version = "1.3.3" -description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "fsspec" -version = "2022.11.0" -description = "File-system specification" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} -requests = {version = "*", optional = true, markers = "extra == \"http\""} - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -entrypoints = ["importlib-metadata"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -tqdm = ["tqdm"] - -[[package]] -name = "gast" -version = "0.4.0" -description = "Python AST that abstracts the underlying Python version" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "gdown" -version = "4.5.3" -description = "Google Drive direct download of big files." 
-category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -beautifulsoup4 = "*" -filelock = "*" -requests = {version = "*", extras = ["socks"]} -six = "*" -tqdm = "*" - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[[package]] -name = "google-auth" -version = "2.14.1" -description = "Google Authentication Library" -category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} -six = ">=1.9.0" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] - -[[package]] -name = "google-auth-oauthlib" -version = "0.4.6" -description = "Google Authentication Library" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -google-auth = ">=1.0.0" -requests-oauthlib = ">=0.7.0" - -[package.extras] -tool = ["click (>=6.0.0)"] - -[[package]] -name = "google-pasta" -version = "0.2.0" -description = "pasta is an AST-based Python refactoring library" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -six = "*" - -[[package]] -name = "grpcio" -version = "1.50.0" -description = "HTTP/2-based RPC framework" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -six = ">=1.5.2" - -[package.extras] -protobuf = ["grpcio-tools (>=1.50.0)"] - -[[package]] -name = "h5py" -version = "3.7.0" -description = "Read and write HDF5 files from Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -numpy = ">=1.14.5" - -[[package]] -name = "hdfs" -version = "2.7.0" -description = "HdfsCLI: API and command line interface for HDFS." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -docopt = "*" -requests = ">=2.7.0" -six = ">=1.9.0" - -[package.extras] -avro = ["fastavro (>=0.21.19)"] -dataframe = ["fastavro (>=0.21.19)", "pandas (>=0.14.1)"] -kerberos = ["requests-kerberos (>=0.7.0)"] - -[[package]] -name = "html5lib" -version = "1.1" -description = "HTML parser based on the WHATWG HTML specification" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -six = ">=1.9" -webencodings = "*" - -[package.extras] -all = ["chardet (>=2.2)", "genshi", "lxml"] -chardet = ["chardet (>=2.2)"] -genshi = ["genshi"] -lxml = ["lxml"] - -[[package]] -name = "httplib2" -version = "0.20.4" -description = "A comprehensive HTTP client library." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} - -[[package]] -name = "huggingface-hub" -version = "0.11.0" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "main" -optional = false -python-versions = ">=3.7.0" - -[package.dependencies] -filelock = "*" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = "*" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] -torch = ["torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "importlib-metadata" -version = "5.1.0" -description = "Read metadata from Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "isort" -version = "5.10.1" -description = "A Python utility / library to sort Python imports." 
-category = "dev" -optional = false -python-versions = ">=3.6.1,<4.0" - -[package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "joblib" -version = "1.2.0" -description = "Lightweight pipelining with Python functions" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "jsonlines" -version = "3.1.0" -description = "Library with helpers for the jsonlines file format" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -attrs = ">=19.2.0" - -[[package]] -name = "kenlm" -version = "0.0.0" -description = "" -category = "main" -optional = false -python-versions = "*" - -[package.source] -type = "url" -url = "https://github.com/kpu/kenlm/archive/master.zip" - -[[package]] -name = "keras" -version = "2.10.0" -description = "Deep learning for humans." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "keras-preprocessing" -version = "1.1.2" -description = "Easy data preprocessing and data augmentation for deep learning models" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -numpy = ">=1.9.1" -six = ">=1.9.0" - -[package.extras] -image = ["Pillow (>=5.2.0)", "scipy (>=0.14)"] -pep8 = ["flake8"] -tests = ["Pillow", "keras", "pandas", "pytest", "pytest-cov", "pytest-xdist", "tensorflow"] - -[[package]] -name = "kss" -version = "2.6.0" -description = "Korean sentence splitter" -category = "main" -optional = false -python-versions = ">=3" - -[[package]] -name = "libclang" -version = "14.0.6" -description = "Clang Python Bindings, mirrored from the official LLVM repo: https://github.com/llvm/llvm-project/tree/main/clang/bindings/python, to make the installation process easier." 
-category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "libcommon" -version = "0.5.0" -description = "Library for utils, common to all the services and workers" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -huggingface-hub = ">=0.11.0,<0.12.0" -mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -orjson = ">=3.6.4,<4.0.0" -psutil = ">=5.9.2,<6.0.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} - -[package.source] -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl" - -[[package]] -name = "librosa" -version = "0.9.2" -description = "Python module for audio and music processing" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -audioread = ">=2.1.9" -decorator = ">=4.0.10" -joblib = ">=0.14" -numba = ">=0.45.1" -numpy = ">=1.17.0" -packaging = ">=20.0" -pooch = ">=1.0" -resampy = ">=0.2.2" -scikit-learn = ">=0.19.1" -scipy = ">=1.2.0" -soundfile = ">=0.10.2" - -[package.extras] -display = ["matplotlib (>=3.3.0)"] -docs = ["ipython (>=7.0)", "matplotlib (>=3.3.0)", "mir-eval (>=0.5)", "numba (<0.50)", "numpydoc", "presets", "sphinx (!=1.3.1)", "sphinx-gallery (>=0.7)", "sphinx-multiversion (>=0.2.3)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)", "sphinxcontrib-svg2pdfconverter"] -tests = ["contextlib2", "matplotlib (>=3.3.0)", "pytest", "pytest-cov", "pytest-mpl", "samplerate", "soxr"] - -[[package]] -name = "llvmlite" -version = "0.39.1" -description = "lightweight wrapper around basic LLVM functionality" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "lm-dataformat" -version = "0.0.20" -description = "A utility for storing and reading files for LM training." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -jsonlines = "*" -ujson = "*" -zstandard = "*" - -[[package]] -name = "lockfile" -version = "0.12.2" -description = "Platform-independent file locking module" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "lxml" -version = "4.9.1" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.7)"] - -[[package]] -name = "markdown" -version = "3.4.1" -description = "Python implementation of Markdown." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -testing = ["coverage", "pyyaml"] - -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "marshmallow" -version = "3.19.0" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "mongo-types" -version = "0.15.1" -description = "Type stubs for mongoengine w/ basic support for bson and pymongo" -category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[[package]] -name = "mongoengine" -version = "0.24.2" -description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pymongo = ">=3.4,<5.0" - -[[package]] -name = "msgpack" -version = "1.0.4" -description = "MessagePack serializer" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "multidict" -version = "6.0.2" -description = "multidict implementation" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "multiprocess" -version = "0.70.9" -description = "better multiprocessing and multithreading in python" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -dill = ">=0.3.1" - -[[package]] -name = "multivolumefile" -version = "0.2.3" -description = "multi volume file wrapper library" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -check = ["check-manifest", "flake8", "flake8-black", "isort (>=5.0.3)", "pygments", "readme-renderer", "twine"] -test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "hypothesis", "pyannotate", "pytest", "pytest-cov"] -type = ["mypy", "mypy-extensions"] - -[[package]] -name = "mypy" -version = "0.812" -description = "Optional static typing for Python" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -mypy-extensions = ">=0.4.3,<0.5.0" -typed-ast = ">=1.4.0,<1.5.0" -typing-extensions = ">=3.7.4" - -[package.extras] -dmypy = ["psutil (>=4.0)"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "nlp" -version = "0.4.0" -description = "HuggingFace/NLP is an open library of NLP datasets." 
-category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -dill = "*" -filelock = "*" -numpy = "*" -pandas = "*" -pyarrow = ">=0.16.0" -requests = ">=2.19.0" -tqdm = ">=4.27" -xxhash = "*" - -[package.extras] -apache-beam = ["apache-beam"] -dev = ["absl-py", "apache-beam", "black", "bs4", "elasticsearch", "faiss-cpu", "flake8 (==3.7.9)", "isort", "langdetect", "mwparserfromhell", "nltk", "pytest", "pytest-xdist", "tensorflow", "tldextract", "torch", "zstandard"] -docs = ["recommonmark", "sphinx", "sphinx-copybutton", "sphinx-markdown-tables", "sphinx-rtd-theme (==0.4.3)"] -quality = ["black", "flake8 (==3.7.9)", "isort"] -tensorflow = ["tensorflow (>=2.2.0)"] -tensorflow-gpu = ["tensorflow-gpu (>=2.2.0)"] -tests = ["absl-py", "apache-beam", "bs4", "elasticsearch", "faiss-cpu", "langdetect", "mwparserfromhell", "nltk", "pytest", "pytest-xdist", "tensorflow", "tldextract", "torch", "zstandard"] -torch = ["torch"] - -[[package]] -name = "nltk" -version = "3.7" -description = "Natural Language Toolkit" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = "*" -joblib = "*" -regex = ">=2021.8.3" -tqdm = "*" - -[package.extras] -all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] -corenlp = ["requests"] -machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] -plot = ["matplotlib"] -tgrep = ["pyparsing"] -twitter = ["twython"] - -[[package]] -name = "numba" -version = "0.56.4" -description = "compiling Python code using LLVM" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -llvmlite = ">=0.39.0dev0,<0.40" -numpy = ">=1.18,<1.24" -setuptools = "*" - -[[package]] -name = "numpy" -version = "1.22.4" -description = "NumPy is the fundamental package for array computing with Python." -category = "main" -optional = false -python-versions = ">=3.8" - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "openpyxl" -version = "3.0.10" -description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -et-xmlfile = "*" - -[[package]] -name = "opt-einsum" -version = "3.3.0" -description = "Optimizing numpys einsum function" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -numpy = ">=1.7" - -[package.extras] -docs = ["numpydoc", "sphinx (==1.2.3)", "sphinx-rtd-theme", "sphinxcontrib-napoleon"] -tests = ["pytest", "pytest-cov", "pytest-pep8"] - -[[package]] -name = "orjson" -version = "3.8.2" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "packageurl-python" -version = "0.10.4" -description = "A purl aka. 
Package URL parser and builder" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -build = ["wheel"] -test = ["black", "isort", "pytest"] - -[[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - -[[package]] -name = "pandas" -version = "1.5.2" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -numpy = {version = ">=1.20.3", markers = "python_version < \"3.10\""} -python-dateutil = ">=2.8.1" -pytz = ">=2020.1" - -[package.extras] -test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] - -[[package]] -name = "pathspec" -version = "0.10.2" -description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pbr" -version = "5.11.0" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" - -[[package]] -name = "pillow" -version = "9.3.0" -description = "Python Imaging Library (Fork)" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "pip" -version = "22.3.1" -description = "The PyPA recommended tool for installing Python packages." -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pip-api" -version = "0.0.30" -description = "An unofficial, importable pip API" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pip = "*" - -[[package]] -name = "pip-audit" -version = "2.4.6" -description = "A tool for scanning Python environments for known vulnerabilities" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -CacheControl = {version = ">=0.12.10", extras = ["filecache"]} -cyclonedx-python-lib = ">=2.0.0,<2.5.0 || >2.5.0" -html5lib = ">=1.1" -packaging = ">=21.0.0" -pip-api = ">=0.0.28" -pip-requirements-parser = ">=31.2.0" -resolvelib = ">=0.8.0" -rich = ">=12.4" -toml = ">=0.10" - -[package.extras] -dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] -test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"] - -[[package]] -name = "pip-requirements-parser" -version = "31.2.0" -description = "pip requirements parser - a mostly correct pip requirements parsing library because it uses pip's own code." -category = "dev" -optional = false -python-versions = ">=3.6.*" - -[package.dependencies] -packaging = "*" - -[package.extras] -docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] - -[[package]] -name = "platformdirs" -version = "2.5.4" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "poetryup" -version = "0.3.15" -description = "Update dependencies and bump their version in the pyproject.toml file" -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - -[package.dependencies] -tomlkit = ">=0.7.2,<0.8.0" - -[[package]] -name = "pooch" -version = "1.6.0" -description = "\"Pooch manages your Python library's sample data files: it automatically downloads and stores them in a local directory, with support for versioning and corruption checks.\"" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -appdirs = ">=1.3.0" -packaging = ">=20.0" -requests = ">=2.19.0" - -[package.extras] -progress = ["tqdm (>=4.41.0,<5.0.0)"] -sftp = ["paramiko (>=2.7.0)"] -xxhash = ["xxhash (>=1.4.3)"] - -[[package]] -name = "proto-plus" -version = "1.22.1" -description = "Beautiful, Pythonic protocol buffers." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" - -[package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "3.19.6" -description = "Protocol Buffers" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "psutil" -version = "5.9.4" -description = "Cross-platform lib for process and system monitoring in Python." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "py7zr" -version = "0.17.4" -description = "Pure python 7-zip library" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -brotli = {version = ">=1.0.9", markers = "platform_python_implementation == \"CPython\""} -brotlicffi = {version = ">=1.0.9.2", markers = "platform_python_implementation == \"PyPy\""} -multivolumefile = ">=0.2.3" -pybcj = {version = ">=0.5.0", markers = "platform_python_implementation == \"CPython\""} -pycryptodomex = ">=3.6.6" -pyppmd = ">=0.17.0" -pyzstd = ">=0.14.4" -texttable = "*" - -[package.extras] -check = ["check-manifest", "flake8", "flake8-black", "flake8-deprecated", "isort (>=5.0.3)", "mypy (>=0.812)", "mypy-extensions (>=0.4.1)", "pygments", "readme-renderer", "twine"] -debug = ["pytest", "pytest-leaks", "pytest-profiling"] -docs = ["docutils", "sphinx (>=2.3)", "sphinx-a4doc", "sphinx-py3doc-enhanced-theme"] -test = ["coverage[toml] (>=5.2)", "coveralls (>=2.1.1)", "py-cpuinfo", "pyannotate", "pytest", "pytest-benchmark", "pytest-cov", "pytest-remotedata", "pytest-timeout"] -test-compat = ["libarchive-c"] - -[[package]] -name = "pyarrow" -version = "7.0.0" -description = "Python library for Apache Arrow" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -numpy = ">=1.16.6" - -[[package]] -name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pyasn1-modules" -version = "0.2.8" -description = "A collection of ASN.1-based protocols modules." 
-category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.5.0" - -[[package]] -name = "pybcj" -version = "1.0.1" -description = "bcj filter library" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -check = ["check-manifest", "flake8 (<5)", "flake8-black", "flake8-colors", "flake8-isort", "flake8-pyi", "flake8-typing-imports", "mypy (>=0.812)", "mypy-extensions (>=0.4.3)", "pygments", "readme-renderer"] -test = ["coverage[toml] (>=5.2)", "hypothesis", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "pycodestyle" -version = "2.7.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pycryptodomex" -version = "3.15.0" -description = "Cryptographic library for Python" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "pydot" -version = "1.4.2" -description = "Python interface to Graphviz's Dot" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -pyparsing = ">=2.1.4" - -[[package]] -name = "pydub" -version = "0.25.1" -description = "Manipulate audio with an simple and easy high level interface" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pyflakes" -version = "2.3.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyicu" -version = "2.10.2" -description = "Python extension wrapping the ICU C++ API" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pymongo" -version = "3.13.0" -description = "Python driver for MongoDB <http://www.mongodb.org>" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} - -[package.extras] -aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] -gssapi = ["pykerberos"] -ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] -snappy = ["python-snappy"] -srv = ["dnspython (>=1.16.0,<1.17.0)"] -tls = ["ipaddress"] -zstd = ["zstandard"] - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pyppmd" -version = "1.0.0" -description = "PPMd compression/decompression library" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -check = ["check-manifest", "flake8", "flake8-black", "isort (>=5.0.3)", "mypy (>=0.812)", "mypy-extensions (>=0.4.3)", "pygments", "readme-renderer"] -docs = ["sphinx (>=2.3)", "sphinx-rtd-theme"] -fuzzer = ["atheris", "hypothesis"] -test = ["coverage[toml] (>=5.2)", "hypothesis", "pytest (>=6.0)", "pytest-benchmark", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "pysocks" -version = "1.7.1" -description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-cov" -version = "2.12.1" -description = "Pytest plugin for measuring coverage." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -coverage = ">=5.2.1" -pytest = ">=4.6" -toml = "*" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "0.21.0" -description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pytz" -version = "2022.6" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pyzstd" -version = "0.15.3" -description = "Python bindings to Zstandard (zstd) compression library, the API is similar to Python's bz2/lzma/zlib modules." -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "rarfile" -version = "4.0" -description = "RAR archive reader for Python" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "regex" -version = "2022.10.31" -description = "Alternative regular expression module, to replace re." -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." -category = "main" -optional = false -python-versions = ">=3.7, <4" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""} -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "1.3.1" -description = "OAuthlib authentication support for Requests." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "resampy" -version = "0.4.2" -description = "Efficient signal resampling" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -numba = ">=0.53" -numpy = ">=1.17" - -[package.extras] -design = ["optuna (>=2.10.0)"] -docs = ["numpydoc", "sphinx (!=1.3.1)"] -tests = ["pytest (<8)", "pytest-cov", "scipy (>=1.0)"] - -[[package]] -name = "resolvelib" -version = "0.9.0" -description = "Resolve abstract dependencies into concrete ones" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -examples = ["html5lib", "packaging", "pygraphviz", "requests"] -lint = ["black", "flake8", "isort", "mypy", "types-requests"] -release = ["build", "towncrier", "twine"] -test = ["commentjson", "packaging", "pytest"] - -[[package]] -name = "responses" -version = "0.18.0" -description = "A utility library for mocking out the `requests` Python library." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -requests = ">=2.0,<3.0" -urllib3 = ">=1.25.10" - -[package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests"] - -[[package]] -name = "rich" -version = "12.6.0" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" -optional = false -python-versions = ">=3.6.3,<4.0.0" - -[package.dependencies] -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -category = "main" -optional = false -python-versions = ">=3.6,<4" - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "scikit-learn" -version = "1.1.3" -description = "A set of python modules for machine learning and data mining" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -joblib = ">=1.0.0" -numpy = ">=1.17.3" -scipy = ">=1.3.2" -threadpoolctl = ">=2.0.0" - -[package.extras] -benchmark = ["matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] -examples = ["matplotlib (>=3.1.2)", "pandas (>=1.0.5)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"] -tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.2)", "mypy (>=0.961)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pyamg (>=4.0.0)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.16.2)"] - -[[package]] -name = "scipy" -version = "1.9.3" -description = "Fundamental algorithms for scientific computing in Python" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -numpy = ">=1.18.5,<1.26.0" - -[package.extras] -dev = ["flake8", "mypy", "pycodestyle", "typing_extensions"] -doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-panels (>=0.5.2)", "sphinx-tabs"] -test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "setuptools" -version = "65.6.3" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", 
"jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "sklearn" -version = "0.0.post1" -description = "deprecated sklearn package, use scikit-learn instead" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "soundfile" -version = "0.11.0" -description = "An audio library based on libsndfile, CFFI and NumPy" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -cffi = ">=1.0" - -[package.extras] -numpy = ["numpy"] - -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "stevedore" -version = "4.1.1" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - -[[package]] -name = "tensorboard" -version = "2.10.1" -description = "TensorBoard lets you watch Tensors Flow" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -absl-py = ">=0.4" -google-auth = ">=1.6.3,<3" -google-auth-oauthlib = ">=0.4.1,<0.5" -grpcio = ">=1.24.3" -markdown = ">=2.6.8" -numpy = ">=1.12.0" -protobuf = ">=3.9.2,<3.20" -requests = ">=2.21.0,<3" -setuptools = ">=41.0.0" -tensorboard-data-server = ">=0.6.0,<0.7.0" -tensorboard-plugin-wit = ">=1.6.0" -werkzeug = ">=1.0.1" -wheel = ">=0.26" - -[[package]] -name = "tensorboard-data-server" -version = "0.6.1" -description = "Fast data loading for TensorBoard" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "tensorboard-plugin-wit" -version = "1.8.1" -description = "What-If Tool TensorBoard plugin." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "tensorflow" -version = "2.10.1" -description = "TensorFlow is an open source machine learning framework for everyone." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -absl-py = ">=1.0.0" -astunparse = ">=1.6.0" -flatbuffers = ">=2.0" -gast = ">=0.2.1,<=0.4.0" -google-pasta = ">=0.1.1" -grpcio = ">=1.24.3,<2.0" -h5py = ">=2.9.0" -keras = ">=2.10.0,<2.11" -keras-preprocessing = ">=1.1.1" -libclang = ">=13.0.0" -numpy = ">=1.20" -opt-einsum = ">=2.3.2" -packaging = "*" -protobuf = ">=3.9.2,<3.20" -setuptools = "*" -six = ">=1.12.0" -tensorboard = ">=2.10,<2.11" -tensorflow-estimator = ">=2.10.0,<2.11" -tensorflow-io-gcs-filesystem = ">=0.23.1" -termcolor = ">=1.1.0" -typing-extensions = ">=3.6.6" -wrapt = ">=1.11.0" - -[[package]] -name = "tensorflow-estimator" -version = "2.10.0" -description = "TensorFlow Estimator." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tensorflow-io-gcs-filesystem" -version = "0.28.0" -description = "TensorFlow IO" -category = "main" -optional = false -python-versions = ">=3.7, <3.11" - -[package.extras] -tensorflow = ["tensorflow (>=2.11.0,<2.12.0)"] -tensorflow-aarch64 = ["tensorflow-aarch64 (>=2.11.0,<2.12.0)"] -tensorflow-cpu = ["tensorflow-cpu (>=2.11.0,<2.12.0)"] -tensorflow-gpu = ["tensorflow-gpu (>=2.11.0,<2.12.0)"] -tensorflow-rocm = ["tensorflow-rocm (>=2.11.0,<2.12.0)"] - -[[package]] -name = "tensorflow-macos" -version = "2.10.0" -description = "TensorFlow is an open source machine learning framework for everyone." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -absl-py = ">=1.0.0" -astunparse = ">=1.6.0" -flatbuffers = ">=2.0" -gast = ">=0.2.1,<=0.4.0" -google-pasta = ">=0.1.1" -grpcio = ">=1.24.3,<2.0" -h5py = ">=2.9.0" -keras = ">=2.10.0,<2.11" -keras-preprocessing = ">=1.1.1" -libclang = ">=13.0.0" -numpy = ">=1.20" -opt-einsum = ">=2.3.2" -packaging = "*" -protobuf = ">=3.9.2,<3.20" -setuptools = "*" -six = ">=1.12.0" -tensorboard = ">=2.10,<2.11" -tensorflow-estimator = ">=2.10.0,<2.11" -termcolor = ">=1.1.0" -typing-extensions = ">=3.6.6" -wrapt = ">=1.11.0" - -[[package]] -name = "termcolor" -version = "2.1.1" -description = "ANSI color formatting for output in terminal" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "texttable" -version = "1.6.7" -description = "module to create simple ASCII tables" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "tfrecord" -version = "1.14.1" -description = "TFRecord reader" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -crc32c = "*" -numpy = "*" -protobuf = "*" - -[[package]] -name = "threadpoolctl" -version = "3.1.0" -description = "threadpoolctl" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "tokenizers" -version = "0.13.2" -description = "Fast and Customizable Tokenizers" -category = "main" -optional = false -python-versions = "*" - -[package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tomlkit" -version = "0.7.2" -description = "Style preserving TOML library" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "torch" -version = "1.10.2" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -category = "main" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -typing-extensions = "*" - -[[package]] -name = "torchaudio" -version = "0.10.2" -description = "An audio package for PyTorch" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -torch = "1.10.2" - -[[package]] -name = "tqdm" -version = "4.64.1" -description = "Fast, 
Extensible Progress Meter" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "transformers" -version = "4.24.0" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "main" -optional = false -python-versions = ">=3.7.0" - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.10.0,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.10.0)"] -all = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.10.0)", "deepspeed (>=0.6.5)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.6.5)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools 
(>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] -docs-specific = ["hf-doc-builder"] -fairscale = ["fairscale (>0.3)"] -flax = ["flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "pyknp (>=0.6.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (==22.3)", "datasets (!=2.5.0)", "flake8 (>=3.8.3)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)"] -ray = ["ray[tune]"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu 
(>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -tf = ["onnxconverter-common", "tensorflow (>=2.4)", "tensorflow-text", "tf2onnx"] -tf-cpu = ["onnxconverter-common", "tensorflow-cpu (>=2.3)", "tensorflow-text", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["torch (>=1.7,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torchhub = ["filelock", "huggingface-hub (>=0.10.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "tqdm (>=4.27)"] -vision = ["Pillow"] - -[[package]] -name = "trec-car-tools" -version = "2.5.4" -description = "Support tools for TREC CAR participants. Also see trec-car.cs.unh.edu" -category = "main" -optional = false -python-versions = ">=3.6" -develop = false - -[package.dependencies] -cbor = ">=1.0.0" -numpy = ">=1.11.2" - -[package.source] -type = "directory" -url = "../../vendors/trec-car-tools/python3" - -[[package]] -name = "typed-ast" -version = "1.4.3" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "typer" -version = "0.4.2" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -click = ">=7.1.1,<9.0.0" - -[package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=5.2,<6.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "shellingham (>=1.3.0,<2.0.0)"] - -[[package]] -name = "types-requests" -version = "2.28.11.5" -description = "Typing stubs for requests" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -types-urllib3 = "<1.27" - -[[package]] -name = "types-urllib3" -version = "1.26.25.4" -description = "Typing stubs for urllib3" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "ujson" -version = "5.5.0" -description = "Ultra fast JSON encoder and decoder for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "urllib3" -version = "1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "werkzeug" -version = "2.2.2" -description = "The comprehensive WSGI web application library." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog"] - -[[package]] -name = "wget" -version = "3.2" -description = "pure python download utility" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "wheel" -version = "0.38.4" -description = "A built-package format for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest (>=3.0.0)"] - -[[package]] -name = "wrapt" -version = "1.14.1" -description = "Module for decorators, wrappers and monkey patching." -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "xxhash" -version = "3.1.0" -description = "Python binding for xxHash" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "yarl" -version = "1.8.1" -description = "Yet another URL library" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zipp" -version = "3.10.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[[package]] -name = "zstandard" -version = "0.19.0" -description = "Zstandard bindings for Python" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} - -[package.extras] -cffi = ["cffi (>=1.11)"] - -[metadata] -lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "152c9759656a3e25a2a16751584007e49545a1a227320371fec492a39e29ae59" - -[metadata.files] -absl-py = [ - {file = "absl-py-1.3.0.tar.gz", hash = "sha256:463c38a08d2e4cef6c498b76ba5bd4858e4c6ef51da1a5a1f27139a022e20248"}, - {file = "absl_py-1.3.0-py3-none-any.whl", hash = "sha256:34995df9bd7a09b3b8749e230408f5a2a2dd7a68a0d33c12a3d0cb15a041a507"}, -] -aiohttp = [ - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b"}, - {file = "aiohttp-3.8.3-cp310-cp310-win32.whl", hash = "sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb"}, - {file = "aiohttp-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34"}, - {file = "aiohttp-3.8.3-cp311-cp311-win32.whl", hash = "sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697"}, - {file = "aiohttp-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290"}, - {file = "aiohttp-3.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win32.whl", hash = "sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494"}, - {file = "aiohttp-3.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0"}, - {file = "aiohttp-3.8.3-cp37-cp37m-win32.whl", hash = "sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033"}, - {file = "aiohttp-3.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937"}, - {file = "aiohttp-3.8.3-cp38-cp38-win32.whl", hash = "sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76"}, - {file = "aiohttp-3.8.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403"}, - {file = "aiohttp-3.8.3-cp39-cp39-win32.whl", hash = "sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618"}, - {file = "aiohttp-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7"}, - {file = "aiohttp-3.8.3.tar.gz", hash = "sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269"}, -] -aiosignal = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] -apache-beam = [ - {file = "apache-beam-2.41.0.zip", hash = "sha256:c1a0456a5b48c3481bf20dc904e4d812515144336873b322f17ba188e2fabd92"}, - {file = "apache_beam-2.41.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8fe21bf02554ad6ca1c1b19d37afc0c08b9d0676fb4b5a9b1f4a17303edd94d4"}, - {file = "apache_beam-2.41.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2b3a06b0cb73a2e1b5ad892c3fab36bd8454ac8abee3cae23832c03ab1cc7121"}, - {file = "apache_beam-2.41.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:82faeb079d9612918b0ad7cbd12fa54ca56b6cb8175c043804446eb2744d965b"}, - {file = "apache_beam-2.41.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ea0f500e16ffb0e5932c802abd301dc042e88ef27ded0935b9b8cac58113c43c"}, - {file = "apache_beam-2.41.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = 
"sha256:e4c101f8c1427ced3c17525540c81d05137415dc4398d5d51df4c70608aee46d"}, - {file = "apache_beam-2.41.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:615612971a6aeb15eb41697945a25bb726f6f7410c71ce029fe3196cdf486edb"}, - {file = "apache_beam-2.41.0-cp37-cp37m-win32.whl", hash = "sha256:2f9395f2faefaa28306081e8fc7ebc5e7157f6734001bfcf9233c077e3d8b2ca"}, - {file = "apache_beam-2.41.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bb216e5890279988c490428cfbce62a03fcdd90d6feca13ec29a074c8c3cfe36"}, - {file = "apache_beam-2.41.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cbad1f7d207224efbe8e461ceacf8c21e21e4a5a011cde87e5881649e24b5e4e"}, - {file = "apache_beam-2.41.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dab8ee4b15cc2608bf5a715167150210246924cca65ddc6847afa21f5211c22f"}, - {file = "apache_beam-2.41.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cca61ef1cc417ce2eca8f331fa0f8f9bbceaf6e67460a048527ebd1c33562d16"}, - {file = "apache_beam-2.41.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c25ab457f4ebe356fe3726c5e7554ca29e975bf5df67ff20e339fc5e568ed550"}, - {file = "apache_beam-2.41.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b7c8ca34772c26cd3103b36b69f2c31fa834ac5bb85859ac9dd51b64a2100b5b"}, - {file = "apache_beam-2.41.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9abcb01b85fd27eaac29ea90757da1d95e293ca2fdaf6b69192020ee05d71257"}, - {file = "apache_beam-2.41.0-cp38-cp38-win32.whl", hash = "sha256:7b6581739ea8d5a346b4a722d1d280adf748d74a5c31322288a0fa9ba3204645"}, - {file = "apache_beam-2.41.0-cp38-cp38-win_amd64.whl", hash = "sha256:7c2ab828a7a3b8973f5f01101fd7746a8562a20f5f390b07e301744afa2a83d7"}, - {file = "apache_beam-2.41.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e28758eec094b7f5585e92d8a6f9b5745a6b335d646b8fd58b6dd7f99109e67"}, - {file = "apache_beam-2.41.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ea94024188e8aa1eb9774be66cf368d44c08cf3b34626fca4803bb33c353b72d"}, - {file = "apache_beam-2.41.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:06da861c4092f64ed9868375e8049ae26b3208c105f3f93268eacd3c7a35e1b9"}, - {file = "apache_beam-2.41.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e511f2cf7d767810ad51ddca72ab93992d0bbd310984f36d5a5659276f3e5e98"}, - {file = "apache_beam-2.41.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:4777dbbb0ed371cf7c72d784acdefba5963d61bac11a3b62875b5817fad2d608"}, - {file = "apache_beam-2.41.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf3e5d838122d8fdf8fc1a1752ff2b661ad5a0641bb62dc227e433b557022acb"}, - {file = "apache_beam-2.41.0-cp39-cp39-win32.whl", hash = "sha256:fa9d2f4ce10662950fa9bec37295d8c1a50dbd4b558ea61693c6bd455d7db790"}, - {file = "apache_beam-2.41.0-cp39-cp39-win_amd64.whl", hash = "sha256:f978f7a815e2ee00c9bcec756b1aa7114ab4ba4c572978a48610f1bed6bc5e35"}, -] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -astunparse = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] -async-timeout = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = 
"async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -audioread = [ - {file = "audioread-3.0.0.tar.gz", hash = "sha256:121995bd207eb1fda3d566beb851d3534275925bc35a4fb6da0cb11de0f7251a"}, -] -bandit = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, -] -brotli = [ - {file = "Brotli-1.0.9-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70"}, - {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b"}, - {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6"}, - {file = "Brotli-1.0.9-cp27-cp27m-win32.whl", hash = "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa"}, - {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452"}, - {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7"}, - {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031"}, - {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43"}, - {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c"}, - {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c"}, - {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0"}, - {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91"}, - {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa"}, - {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"}, - {file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"}, - {file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"}, - {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a"}, - {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df"}, - {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad"}, - {file = 
"Brotli-1.0.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde"}, - {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a"}, - {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f"}, - {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d"}, - {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f"}, - {file = "Brotli-1.0.9-cp311-cp311-win32.whl", hash = "sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d"}, - {file = "Brotli-1.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679"}, - {file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"}, - {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296"}, - {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430"}, - {file = "Brotli-1.0.9-cp35-cp35m-win32.whl", hash = "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1"}, - {file = "Brotli-1.0.9-cp35-cp35m-win_amd64.whl", hash = "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea"}, - {file = "Brotli-1.0.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f"}, - {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4"}, - {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a"}, - {file = "Brotli-1.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b"}, - {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f"}, - {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6"}, - {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b"}, - {file = "Brotli-1.0.9-cp36-cp36m-win32.whl", hash = "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14"}, - {file = "Brotli-1.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c"}, - {file = "Brotli-1.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126"}, - {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d"}, - {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12"}, - {file = "Brotli-1.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130"}, - {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a"}, - {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3"}, - {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d"}, - {file = "Brotli-1.0.9-cp37-cp37m-win32.whl", hash = "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"}, - {file = "Brotli-1.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5"}, - {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb"}, - {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8"}, - {file = "Brotli-1.0.9-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb"}, - {file = "Brotli-1.0.9-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26"}, - {file = "Brotli-1.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c"}, - {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b"}, - {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17"}, - {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649"}, - {file = "Brotli-1.0.9-cp38-cp38-win32.whl", hash = "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429"}, - {file = "Brotli-1.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f"}, - {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19"}, - {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7"}, - {file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"}, - {file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"}, - {file = "Brotli-1.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7"}, - {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806"}, - {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1"}, - {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c"}, - {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"}, - {file = 
"Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"}, - {file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"}, - {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7"}, - {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019"}, - {file = "Brotli-1.0.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d"}, - {file = "Brotli-1.0.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8"}, - {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337"}, - {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be"}, - {file = "Brotli-1.0.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a"}, - {file = "Brotli-1.0.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be"}, - {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a"}, - {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7"}, - {file = "Brotli-1.0.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755"}, - {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"}, -] -brotlicffi = [ - {file = "brotlicffi-1.0.9.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:408ec4359f9763280d5c4e0ad29c51d1240b25fdd18719067e972163b4125b98"}, - {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2e4629f7690ded66c8818715c6d4dd6a7ff6a4f10fad6186fe99850f781ce210"}, - {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:137c4635edcdf593de5ce9d0daa596bf499591b16b8fca5fd72a490deb54b2ee"}, - {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:af8a1b7bcfccf9c41a3c8654994d6a81821fdfe4caddcfe5045bfda936546ca3"}, - {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9078432af4785f35ab3840587eed7fb131e3fc77eb2a739282b649b343c584dd"}, - {file = "brotlicffi-1.0.9.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7bb913d5bf3b4ce2ec59872711dc9faaff5f320c3c3827cada2d8a7b793a7753"}, - {file = "brotlicffi-1.0.9.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:16a0c9392a1059e2e62839fbd037d2e7e03c8ae5da65e9746f582464f7fab1bb"}, - {file = "brotlicffi-1.0.9.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:94d2810efc5723f1447b332223b197466190518a3eeca93b9f357efb5b22c6dc"}, - {file = 
"brotlicffi-1.0.9.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:9e70f3e20f317d70912b10dbec48b29114d3dbd0e9d88475cb328e6c086f0546"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:586f0ea3c2eed455d5f2330b9ab4a591514c8de0ee53d445645efcfbf053c69f"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux1_i686.whl", hash = "sha256:4454c3baedc277fd6e65f983e3eb8e77f4bc15060f69370a0201746e2edeca81"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:52c1c12dad6eb1d44213a0a76acf5f18f64653bd801300bef5e2f983405bdde5"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:21cd400d24b344c218d8e32b394849e31b7c15784667575dbda9f65c46a64b0a"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:71061f8bc86335b652e442260c4367b782a92c6e295cf5a10eff84c7d19d8cf5"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:15e0db52c56056be6310fc116b3d7c6f34185594e261f23790b2fb6489998363"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-win32.whl", hash = "sha256:551305703d12a2dd1ae43d3dde35dee20b1cb49b5796279d4d34e2c6aec6be4d"}, - {file = "brotlicffi-1.0.9.2-cp35-abi3-win_amd64.whl", hash = "sha256:2be4fb8a7cb482f226af686cd06d2a2cab164ccdf99e460f8e3a5ec9a5337da2"}, - {file = "brotlicffi-1.0.9.2-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:8e7221d8a084d32d15c7b58e0ce0573972375c5038423dbe83f217cfe512e680"}, - {file = "brotlicffi-1.0.9.2-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:75a46bc5ed2753e1648cc211dcb2c1ac66116038766822dc104023f67ff4dfd8"}, - {file = "brotlicffi-1.0.9.2-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1e27c43ef72a278f9739b12b2df80ee72048cd4cbe498f8bbe08aaaa67a5d5c8"}, - {file = "brotlicffi-1.0.9.2-pp27-pypy_73-win32.whl", hash = "sha256:feb942814285bdc5e97efc77a04e48283c17dfab9ea082d79c0a7b9e53ef1eab"}, - {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6208d82c3172eeeb3be83ed4efd5831552c7cd47576468e50fcf0fb23fcf97f"}, - {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:408c810c599786fb806556ff17e844a903884e6370ca400bcec7fa286149f39c"}, - {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a73099858ee343e8801710a08be8d194f47715ff21e98d92a19ac461058f52d1"}, - {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-win32.whl", hash = "sha256:916b790f967a18a595e61f218c252f83718ac91f24157d622cf0fa710cd26ab7"}, - {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba4a00263af40e875ec3d6c7f623cbf8c795b55705da18c64ec36b6bf0848bc5"}, - {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:df78aa47741122b0d5463f1208b7bb18bc9706dee5152d9f56e0ead4865015cd"}, - {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:9030cd5099252d16bfa4e22659c84a89c102e94f8e81d30764788b72e2d7cfb7"}, - {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-win32.whl", hash = "sha256:7e72978f4090a161885b114f87b784f538dcb77dafc6602592c1cf39ae8d243d"}, - {file = "brotlicffi-1.0.9.2.tar.gz", hash = "sha256:0c248a68129d8fc6a217767406c731e498c3e19a7be05ea0a90c3c86637b7d96"}, -] -bs4 = [ - {file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"}, -] -cachecontrol = [ - {file = "CacheControl-0.12.11-py2.py3-none-any.whl", hash = "sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b"}, - {file = 
"CacheControl-0.12.11.tar.gz", hash = "sha256:a5b9fcc986b184db101aa280b42ecdcdfc524892596f606858e0b7a8b4d9e144"}, -] -cachetools = [ - {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, - {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, -] -cbor = [ - {file = "cbor-1.0.0.tar.gz", hash = "sha256:13225a262ddf5615cbd9fd55a76a0d53069d18b07d2e9f19c39e6acb8609bbb6"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = 
"cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", 
hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -cloudpickle = [ - {file = "cloudpickle-2.2.0-py3-none-any.whl", hash = "sha256:7428798d5926d8fcbfd092d18d01a2a03daf8237d8fcdc8095d256b8490796f0"}, - {file = "cloudpickle-2.2.0.tar.gz", hash = "sha256:3f4219469c55453cfe4737e564b67c2a149109dabf7f242478948b895f61106f"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -commonmark = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] -conllu = [ - {file = "conllu-4.5.2-py2.py3-none-any.whl", hash = "sha256:660e7305b25d1993404e17197c1a17a08f2214ae780d9a7d69361274aaea260d"}, - {file = "conllu-4.5.2.tar.gz", hash = "sha256:7c581c0d12fcdd546cbf69050063c37312de28dd3048c3f144ec5b851e71891c"}, -] -coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = 
"coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, 
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -crc32c = [ - {file = "crc32c-2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82942ed343e5c884b5c0c9aa6bb5bb47de0247df95ce5d154cc48744d5c2ffd4"}, - {file = "crc32c-2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f641a9bd24a309637cca6c119b8aabdfe6d41bab5ea630124ee9be7891e36ba1"}, - {file = "crc32c-2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:374d288cc1735932276bc65670db329dd9fe2af4ec323599dc40e1212b13985e"}, - {file = "crc32c-2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b7c71a3ae1511c42b7919e6116560c08ba89479ea249f281c5bfba2b619411d"}, - {file = "crc32c-2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f524fd202472d041b9bddb4a51b5fff28767a9c69953dbcdeecc67ef65707c07"}, - {file = "crc32c-2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9a070dbe10dac29c2f591a59300c37448e3c7a747b6ea18d4826b7c94a956bd"}, - {file = "crc32c-2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ab9df0bd9bf10f3d5bd346321d48da8a28392b1f48f7a6fa3234acebe6ee448"}, - {file = 
"crc32c-2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8948a9262d36e2aad3be74aac3ce7a1b090ab2361f7619b3f23418fa536f1b25"}, - {file = "crc32c-2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:865bf66d86809971d4856e38085a4a15a7251b8e780f22ad52e12b50784dac25"}, - {file = "crc32c-2.3-cp310-cp310-win32.whl", hash = "sha256:e14f4d57e004fa5a6100ea3aeb9574bee6f95965a96a382154fa40aee1fdeb5e"}, - {file = "crc32c-2.3-cp310-cp310-win_amd64.whl", hash = "sha256:ca03d8d5b35a26e0d3eb8c7121de3e37a59042735029eabcf1c4b15343f82cdd"}, - {file = "crc32c-2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5612be1606eec55511ade38deec40c9f1c7647ec0407a4031e0a2e6e6a635f27"}, - {file = "crc32c-2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab21f02c13dc5a0411838d0709cb4d24bcb865ea28b683b7403826c08d14e27"}, - {file = "crc32c-2.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c1f3e28b8aec8a0f7727337fafa31f0ace38e59e054c51fecb923535c6dc6e6"}, - {file = "crc32c-2.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed14214fcc1416e0dc63be4c88aad7f58e0f0cb2c22d578b861e8fc19d1b2d2f"}, - {file = "crc32c-2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1d334d51d395f78fb649e8442341da782e63d3f9552fcfbc040995d24d4b794d"}, - {file = "crc32c-2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5ddf91756d6275f497d0895b8875d1f1fdac6be08a5900f4123ede2c91cd1422"}, - {file = "crc32c-2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5aa6383c0a13a542c3f1eb82a02e29c1141e0a2bc63faedd0062d1c41649989f"}, - {file = "crc32c-2.3-cp36-cp36m-win32.whl", hash = "sha256:ef1165f7f36edaae03fcf03f1ca3bdbf196a5255d656bfb17959ba0405a2c8ee"}, - {file = "crc32c-2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f1679f7f700f2aec3dbee4e357a2fdde53e2ec151dde4e0b52a9205fac273a90"}, - {file = "crc32c-2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c04a27ba3cbc7a9e34c77f402bd3a83442a2c7acd3897d2539b1a3321ed28a6a"}, - {file = "crc32c-2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51ac079c44297bbf624a598cffe6f85bd0a5faf780fd75d2d5e531d42d427ef"}, - {file = "crc32c-2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb1fea3d9ec71f353a6c38648d074e722fff1f43c1998ae6088dbee324a1ca6"}, - {file = "crc32c-2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b917b73d810bcdbcd1461978ba55038dcf2bbc3b56704b0082d2f9b0d5edc7ad"}, - {file = "crc32c-2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0369e637d13db5c06e45a34b069ff2ba292ac881e8a44a8658ccf3edaa9c392f"}, - {file = "crc32c-2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:47088e524a9ec2887ae0ec519d75df40f005debf9d52f10e688f27e7cc0d339c"}, - {file = "crc32c-2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fddf16ed92dcb8ee34a12bd0757d5719d3c750a9dc813d82972477885b114339"}, - {file = "crc32c-2.3-cp37-cp37m-win32.whl", hash = "sha256:3f372a53e9cf2464421b82b41fb66d98f654284c8fc4363f51bb0f5485fdc2b4"}, - {file = "crc32c-2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4d223e844ee61ac492f0197b62ccc2a9c23db15e4d2938e698fec6eded0daf15"}, - {file = "crc32c-2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4323f56908b7e5cea039122aad039fcf750974b09e4f993244d4dddb24cab561"}, - {file = "crc32c-2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:fac1b4248625acd65985378f6b34a00b73cfc9db5b8ccc73101744de2e3dfa66"}, - {file = "crc32c-2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9ce72a40c17636af97e37bad2f2c11a2e740f57d4051ef586c04d1aa83db8b38"}, - {file = "crc32c-2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9bc7e5599f5970fff1f9aa551639336a76d1bb1fb00f0b87704049df8ba035"}, - {file = "crc32c-2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:682974e2cfb199ebc4adc5eb4d493dbcf83812a031a8ecccae5a7b5bcade5d9f"}, - {file = "crc32c-2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:255e35719c252ce7609cb3f1c5a045783a6e0d6d7b035d507ddd82d5194c236a"}, - {file = "crc32c-2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:df19ab6ab3884a237388c7720b1fe617dd4893305f62383d0f96fc7980dfdf7c"}, - {file = "crc32c-2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:61479a60d5a2b3160a4ae17b37df119963a741fd61ca71d4792670cdf7d7ea41"}, - {file = "crc32c-2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e6e16d57b8103fee9fdecb38e908d9ceb70d2196bb932dba64bf7b570f44c0b9"}, - {file = "crc32c-2.3-cp38-cp38-win32.whl", hash = "sha256:ad83e4c78379cc3e22b760e9874bc57f91a9cfb85107ccba1c6442bc1a2e2a1c"}, - {file = "crc32c-2.3-cp38-cp38-win_amd64.whl", hash = "sha256:32c573dd861933e2390932cc10e1b78d71ee7827ee4dfcec96e23cf007a1a6d3"}, - {file = "crc32c-2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ad57917650af59c989b62184fc4604d6c5066fc030ced4c6e07a596000f1ab86"}, - {file = "crc32c-2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5e076ae46ac0e4e28eb43932c5c0b8e1b8751bb7d1b0d239f18230aed7cca3bf"}, - {file = "crc32c-2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:896bda76db13f229c1126d5e384673f78e06685e70d76fff4c5a3f65b4068b4d"}, - {file = "crc32c-2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bc2a9ccfa7c02bb8a5346fd546b65ed265965e7fea768c7f2681f2b68d6a0"}, - {file = "crc32c-2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6872d8728f30f2a13f95762801428cf92a7ee6f170c872be81a17b1549b69131"}, - {file = "crc32c-2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:327e44184826cd1c72bcd4a9b2c4badfd29501333e158460c7d3ad8b7f066588"}, - {file = "crc32c-2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:866d1cbe646bdef67fc225371da265f081809bcf238bf562d6874c97e7fcb0d6"}, - {file = "crc32c-2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c59c6ea67ab927b2ab958c7b01a6b17c9cad882e7a1da51b9c35fbc9874ff46a"}, - {file = "crc32c-2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27116037f97a02f1a123ca82008ee993c28afe8590e047a6cd86aca33653cca"}, - {file = "crc32c-2.3-cp39-cp39-win32.whl", hash = "sha256:90c46644225dc7f71b4dd499ed71ada59d061fd60aa55233270d088ee8cfcd13"}, - {file = "crc32c-2.3-cp39-cp39-win_amd64.whl", hash = "sha256:a2427a9196c2b8b1c27d7e31cc5c9fff13af0b1411ff1565459f65554990f055"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5a13d41a29d3feea5ba87def9d4dccc3362139345a24997de33fad00b656622b"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8363b553b33719b37fff46378a6e96106fd9232d2e043eebb6c6da46925c7663"}, - {file = 
"crc32c-2.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ec3d9257d0624fb74335f67592b6a30de5e0cfb60322ed8682e35820decac8f"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d82fa5bb0661a7a508e62730d4d9045f53d4ab6a9211b560a014f1d58a8337cb"}, - {file = "crc32c-2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5f347244590f294eaea2e92546100bd56db926305e0603a0d57a88e59f86b308"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dce1deda03c6dbe0f5ae6e3e0f8671caead64075fd19a61b1700d42a88af97c8"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7d568eb07473d9bc6fb413a4d3248265212c537b80d494ab884cc5316589110"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5560faa3f673183eb1e2fc2c1361cc9ab86865a1d5774baf61fec9ca6c1a696"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8067ce072908626869b583700da6b4bfc9a538975d77232ae68a31d8af5f1ff6"}, - {file = "crc32c-2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:250af144edce7850a35c618b4dd1bf56436e031560228c17a7c78bf29239ceb0"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4ac8738e9cd28948e40fb3a3c89a44660e4ad266f7726964200224e101f5c8ef"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c74d81a00972cbe65e27e99838b44ed5e04bced971e5bfa01c27a4bd17138442"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a423c098ceffbd70544d1de3e00eeb45ec4b8463ab5d8005389fbbf3243314d1"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04c44ad7cde9c21ad426bdfa675ba7039db82a6961c99690f9d2ff2f034c892"}, - {file = "crc32c-2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cea0fe7053e36a4809e5bf95989552f52c98bbc94dca9062fb5b8c976daa0f32"}, - {file = "crc32c-2.3.tar.gz", hash = "sha256:17ce6c596ad0d53df52dcd72defb66984aeabd98fbefea7ba848a6b6bdece36a"}, -] -crcmod = [ - {file = "crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e"}, -] -cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, -] -datasets = [ - {file = "datasets-2.7.1-py3-none-any.whl", hash = "sha256:3d0d2e860cec7c4e77c40de64533d46853f939b6e2311cba4f483f000afae868"}, - {file = "datasets-2.7.1.tar.gz", hash = "sha256:1c79a982d9d9c75fbbaea5b177c2b4c56894289b647fa2845ae2ebd8ac638a0f"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -dill = [ - {file = "dill-0.3.1.1.tar.gz", hash = "sha256:42d8ef819367516592a825746a18073ced42ca169ab1f5f4044134703e7a049c"}, -] -dnspython = [ - {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash 
= "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, - {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, -] -docopt = [ - {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, -] -environs = [ - {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, - {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, -] -et-xmlfile = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, -] -exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, -] -fastavro = [ - {file = "fastavro-1.7.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ab3387a06e272980fa034f5c62f7063977b77df6416d3d30a4d3b49cc8827566"}, - {file = "fastavro-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:216132bc54da19e97e1531dd69c86282408d4c797749d83b01b3a00862a180de"}, - {file = "fastavro-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c20cf6cd8098bb93c2cffd53d03ccea1dcf9ec594a5c83963acf29a2882f8693"}, - {file = "fastavro-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:54b60e79506a456bcfc940560fa2c73a7a8e3ddc58a1ae4d94fdd99f6b02aef0"}, - {file = "fastavro-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4da6d83abd04a804310667f8d1fc23d44e9ed91ed9a9bc9c1fcd906e0c403b12"}, - {file = "fastavro-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c664302f94064adeb93403c61c74e07b9710526403eba3b59169f99bb99c55c"}, - {file = "fastavro-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aab162f02e64bf82d0282430a3c6ec7a36982b1c5d479e7dcc278e6d62a84b8"}, - {file = "fastavro-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:63d0d2a2bb3e85d006c834633be51b105a50b0dc7cc8423b06f30601b532adf4"}, - {file = "fastavro-1.7.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:a07a3245049529d6d9028d664361cc4990e74d035d2303875295e2f7b97eba09"}, - {file = "fastavro-1.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7df57d31cb58770a9066790250e9f4ec91242c69e1dc62ea732a6fb2406a8f96"}, - {file = "fastavro-1.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b905634b5763ca08c9f7b51486e2c3ae7907f5d9bc48208c69b16ccbe8455e90"}, - {file = "fastavro-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:749206f1cec3d7429546e49db5708f4571497d35181b6b334c4844133f230515"}, - {file = "fastavro-1.7.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:a0ebd5c1269085179de4b3f072de274fb66a471ecbc5245bd8684e6f94943c2f"}, - {file = "fastavro-1.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7e35b94b692f8cca0096c89abf1937efed66252dea0b3b3165babfb3c289fb7"}, - {file = "fastavro-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202424a5a7831b773f0f2cc2f82e89ed1726051fd5994f13dc678514144e10d4"}, - {file = 
"fastavro-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:601f8c2ec166bd721f4b12eafe195dd1373d3f8dce4fe2425abd2df3e3968ac7"}, - {file = "fastavro-1.7.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:91cc9be740abca894082af4fe3ab9db057d4e5fa783cfa9a94c02ec041bf4346"}, - {file = "fastavro-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e4463b00242e4baf52d499aeefab46a26d9dd18d808d4219cd4d21089da540e"}, - {file = "fastavro-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7348d318858203bd108e6bcde177d8a6f0590b52bc624d815f26fb6c37029bb"}, - {file = "fastavro-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:492e8902063aa1c73170e11c18495fcaa86b71eae3513ef83ba438ca02b16b34"}, - {file = "fastavro-1.7.0.tar.gz", hash = "sha256:4b1205f46489b4032d3155c1ab44d9824be0c7454df98d3a5bd22b78b98f23c8"}, -] -filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, -] -flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] -flatbuffers = [ - {file = "flatbuffers-22.11.23-py2.py3-none-any.whl", hash = "sha256:13043a5deba77e55b73064750195d2c5b494754d52b7d4ad01bc52cad5c3c9f2"}, - {file = "flatbuffers-22.11.23.tar.gz", hash = "sha256:2a82b85eea7f6712ab41077086dae1a89382862fe64414c8ebdf976123d1a095"}, -] -frozenlist = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = 
"frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, -] -fsspec = [ - {file = "fsspec-2022.11.0-py3-none-any.whl", hash = "sha256:d6e462003e3dcdcb8c7aa84c73a228f8227e72453cd22570e2363e8844edfe7b"}, - {file = "fsspec-2022.11.0.tar.gz", hash = "sha256:259d5fd5c8e756ff2ea72f42e7613c32667dc2049a4ac3d84364a7ca034acb8b"}, -] -gast = [ - {file = "gast-0.4.0-py3-none-any.whl", hash = "sha256:b7adcdd5adbebf1adf17378da5ba3f543684dbec47b1cda1f3997e573cd542c4"}, - {file = "gast-0.4.0.tar.gz", hash = "sha256:40feb7b8b8434785585ab224d1568b857edb18297e5a3047f1ba012bc83b42c1"}, -] -gdown = [ - {file = "gdown-4.5.3.tar.gz", hash = "sha256:6cbf7dd4108588c734aa588131d8e1d52e64f0873870f71f74cbac195f0c60ef"}, -] -gitdb = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = 
"sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] -gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, -] -google-auth = [ - {file = "google-auth-2.14.1.tar.gz", hash = "sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d"}, - {file = "google_auth-2.14.1-py2.py3-none-any.whl", hash = "sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016"}, -] -google-auth-oauthlib = [ - {file = "google-auth-oauthlib-0.4.6.tar.gz", hash = "sha256:a90a072f6993f2c327067bf65270046384cda5a8ecb20b94ea9a687f1f233a7a"}, - {file = "google_auth_oauthlib-0.4.6-py2.py3-none-any.whl", hash = "sha256:3f2a6e802eebbb6fb736a370fbf3b055edcb6b52878bf2f26330b5e041316c73"}, -] -google-pasta = [ - {file = "google-pasta-0.2.0.tar.gz", hash = "sha256:c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e"}, - {file = "google_pasta-0.2.0-py2-none-any.whl", hash = "sha256:4612951da876b1a10fe3960d7226f0c7682cf901e16ac06e473b267a5afa8954"}, - {file = "google_pasta-0.2.0-py3-none-any.whl", hash = "sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed"}, -] -grpcio = [ - {file = "grpcio-1.50.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:906f4d1beb83b3496be91684c47a5d870ee628715227d5d7c54b04a8de802974"}, - {file = "grpcio-1.50.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:2d9fd6e38b16c4d286a01e1776fdf6c7a4123d99ae8d6b3f0b4a03a34bf6ce45"}, - {file = "grpcio-1.50.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:4b123fbb7a777a2fedec684ca0b723d85e1d2379b6032a9a9b7851829ed3ca9a"}, - {file = "grpcio-1.50.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2f77a90ba7b85bfb31329f8eab9d9540da2cf8a302128fb1241d7ea239a5469"}, - {file = "grpcio-1.50.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eea18a878cffc804506d39c6682d71f6b42ec1c151d21865a95fae743fda500"}, - {file = "grpcio-1.50.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b71916fa8f9eb2abd93151fafe12e18cebb302686b924bd4ec39266211da525"}, - {file = "grpcio-1.50.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:95ce51f7a09491fb3da8cf3935005bff19983b77c4e9437ef77235d787b06842"}, - {file = "grpcio-1.50.0-cp310-cp310-win32.whl", hash = "sha256:f7025930039a011ed7d7e7ef95a1cb5f516e23c5a6ecc7947259b67bea8e06ca"}, - {file = "grpcio-1.50.0-cp310-cp310-win_amd64.whl", hash = "sha256:05f7c248e440f538aaad13eee78ef35f0541e73498dd6f832fe284542ac4b298"}, - {file = "grpcio-1.50.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:ca8a2254ab88482936ce941485c1c20cdeaef0efa71a61dbad171ab6758ec998"}, - {file = "grpcio-1.50.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3b611b3de3dfd2c47549ca01abfa9bbb95937eb0ea546ea1d762a335739887be"}, - {file = "grpcio-1.50.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a4cd8cb09d1bc70b3ea37802be484c5ae5a576108bad14728f2516279165dd7"}, - {file = "grpcio-1.50.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:156f8009e36780fab48c979c5605eda646065d4695deea4cfcbcfdd06627ddb6"}, - {file = "grpcio-1.50.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de411d2b030134b642c092e986d21aefb9d26a28bf5a18c47dd08ded411a3bc5"}, 
- {file = "grpcio-1.50.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d144ad10eeca4c1d1ce930faa105899f86f5d99cecfe0d7224f3c4c76265c15e"}, - {file = "grpcio-1.50.0-cp311-cp311-win32.whl", hash = "sha256:92d7635d1059d40d2ec29c8bf5ec58900120b3ce5150ef7414119430a4b2dd5c"}, - {file = "grpcio-1.50.0-cp311-cp311-win_amd64.whl", hash = "sha256:ce8513aee0af9c159319692bfbf488b718d1793d764798c3d5cff827a09e25ef"}, - {file = "grpcio-1.50.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:8e8999a097ad89b30d584c034929f7c0be280cd7851ac23e9067111167dcbf55"}, - {file = "grpcio-1.50.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:a50a1be449b9e238b9bd43d3857d40edf65df9416dea988929891d92a9f8a778"}, - {file = "grpcio-1.50.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:cf151f97f5f381163912e8952eb5b3afe89dec9ed723d1561d59cabf1e219a35"}, - {file = "grpcio-1.50.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a23d47f2fc7111869f0ff547f771733661ff2818562b04b9ed674fa208e261f4"}, - {file = "grpcio-1.50.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84d04dec64cc4ed726d07c5d17b73c343c8ddcd6b59c7199c801d6bbb9d9ed1"}, - {file = "grpcio-1.50.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:67dd41a31f6fc5c7db097a5c14a3fa588af54736ffc174af4411d34c4f306f68"}, - {file = "grpcio-1.50.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8d4c8e73bf20fb53fe5a7318e768b9734cf122fe671fcce75654b98ba12dfb75"}, - {file = "grpcio-1.50.0-cp37-cp37m-win32.whl", hash = "sha256:7489dbb901f4fdf7aec8d3753eadd40839c9085967737606d2c35b43074eea24"}, - {file = "grpcio-1.50.0-cp37-cp37m-win_amd64.whl", hash = "sha256:531f8b46f3d3db91d9ef285191825d108090856b3bc86a75b7c3930f16ce432f"}, - {file = "grpcio-1.50.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:d534d169673dd5e6e12fb57cc67664c2641361e1a0885545495e65a7b761b0f4"}, - {file = "grpcio-1.50.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:1d8d02dbb616c0a9260ce587eb751c9c7dc689bc39efa6a88cc4fa3e9c138a7b"}, - {file = "grpcio-1.50.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:baab51dcc4f2aecabf4ed1e2f57bceab240987c8b03533f1cef90890e6502067"}, - {file = "grpcio-1.50.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40838061e24f960b853d7bce85086c8e1b81c6342b1f4c47ff0edd44bbae2722"}, - {file = "grpcio-1.50.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:931e746d0f75b2a5cff0a1197d21827a3a2f400c06bace036762110f19d3d507"}, - {file = "grpcio-1.50.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15f9e6d7f564e8f0776770e6ef32dac172c6f9960c478616c366862933fa08b4"}, - {file = "grpcio-1.50.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a4c23e54f58e016761b576976da6a34d876420b993f45f66a2bfb00363ecc1f9"}, - {file = "grpcio-1.50.0-cp38-cp38-win32.whl", hash = "sha256:3e4244c09cc1b65c286d709658c061f12c61c814be0b7030a2d9966ff02611e0"}, - {file = "grpcio-1.50.0-cp38-cp38-win_amd64.whl", hash = "sha256:8e69aa4e9b7f065f01d3fdcecbe0397895a772d99954bb82eefbb1682d274518"}, - {file = "grpcio-1.50.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:af98d49e56605a2912cf330b4627e5286243242706c3a9fa0bcec6e6f68646fc"}, - {file = "grpcio-1.50.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:080b66253f29e1646ac53ef288c12944b131a2829488ac3bac8f52abb4413c0d"}, - {file = "grpcio-1.50.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ab5d0e3590f0a16cb88de4a3fa78d10eb66a84ca80901eb2c17c1d2c308c230f"}, - {file = "grpcio-1.50.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:cb11464f480e6103c59d558a3875bd84eed6723f0921290325ebe97262ae1347"}, - {file = "grpcio-1.50.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e07fe0d7ae395897981d16be61f0db9791f482f03fee7d1851fe20ddb4f69c03"}, - {file = "grpcio-1.50.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d75061367a69808ab2e84c960e9dce54749bcc1e44ad3f85deee3a6c75b4ede9"}, - {file = "grpcio-1.50.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ae23daa7eda93c1c49a9ecc316e027ceb99adbad750fbd3a56fa9e4a2ffd5ae0"}, - {file = "grpcio-1.50.0-cp39-cp39-win32.whl", hash = "sha256:177afaa7dba3ab5bfc211a71b90da1b887d441df33732e94e26860b3321434d9"}, - {file = "grpcio-1.50.0-cp39-cp39-win_amd64.whl", hash = "sha256:ea8ccf95e4c7e20419b7827aa5b6da6f02720270686ac63bd3493a651830235c"}, - {file = "grpcio-1.50.0.tar.gz", hash = "sha256:12b479839a5e753580b5e6053571de14006157f2ef9b71f38c56dc9b23b95ad6"}, -] -h5py = [ - {file = "h5py-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d77af42cb751ad6cc44f11bae73075a07429a5cf2094dfde2b1e716e059b3911"}, - {file = "h5py-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63beb8b7b47d0896c50de6efb9a1eaa81dbe211f3767e7dd7db159cea51ba37a"}, - {file = "h5py-3.7.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:04e2e1e2fc51b8873e972a08d2f89625ef999b1f2d276199011af57bb9fc7851"}, - {file = "h5py-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73307c876af49aa869ec5df1818e9bb0bdcfcf8a5ba773cc45a4fba5a286a5c"}, - {file = "h5py-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:f514b24cacdd983e61f8d371edac8c1b780c279d0acb8485639e97339c866073"}, - {file = "h5py-3.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:43fed4d13743cf02798a9a03a360a88e589d81285e72b83f47d37bb64ed44881"}, - {file = "h5py-3.7.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c038399ce09a58ff8d89ec3e62f00aa7cb82d14f34e24735b920e2a811a3a426"}, - {file = "h5py-3.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03d64fb86bb86b978928bad923b64419a23e836499ec6363e305ad28afd9d287"}, - {file = "h5py-3.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e5b7820b75f9519499d76cc708e27242ccfdd9dfb511d6deb98701961d0445aa"}, - {file = "h5py-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a9351d729ea754db36d175098361b920573fdad334125f86ac1dd3a083355e20"}, - {file = "h5py-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6776d896fb90c5938de8acb925e057e2f9f28755f67ec3edcbc8344832616c38"}, - {file = "h5py-3.7.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a047fddbe6951bce40e9cde63373c838a978c5e05a011a682db9ba6334b8e85"}, - {file = "h5py-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0798a9c0ff45f17d0192e4d7114d734cac9f8b2b2c76dd1d923c4d0923f27bb6"}, - {file = "h5py-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:0d8de8cb619fc597da7cf8cdcbf3b7ff8c5f6db836568afc7dc16d21f59b2b49"}, - {file = "h5py-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f084bbe816907dfe59006756f8f2d16d352faff2d107f4ffeb1d8de126fc5dc7"}, - {file = "h5py-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fcb11a2dc8eb7ddcae08afd8fae02ba10467753a857fa07a404d700a93f3d53"}, - {file = "h5py-3.7.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ed43e2cc4f511756fd664fb45d6b66c3cbed4e3bd0f70e29c37809b2ae013c44"}, - {file = "h5py-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:9e7535df5ee3dc3e5d1f408fdfc0b33b46bc9b34db82743c82cd674d8239b9ad"}, - {file = "h5py-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:9e2ad2aa000f5b1e73b5dfe22f358ca46bf1a2b6ca394d9659874d7fc251731a"}, - {file = "h5py-3.7.0.tar.gz", hash = "sha256:3fcf37884383c5da64846ab510190720027dca0768def34dd8dcb659dbe5cbf3"}, -] -hdfs = [ - {file = "hdfs-2.7.0-py3-none-any.whl", hash = "sha256:3428078ad1e83a2e2a11801c536ac2aa5094f5fabde5d1e7145bacbf4a599c1e"}, - {file = "hdfs-2.7.0.tar.gz", hash = "sha256:ecd4650c39bb4f9421641320f4931edd81cf7126ae4e5ec880215adf6435df3d"}, -] -html5lib = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] -httplib2 = [ - {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, - {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, -] -huggingface-hub = [ - {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, - {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -importlib-metadata = [ - {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, - {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -joblib = [ - {file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"}, - {file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"}, -] -jsonlines = [ - {file = "jsonlines-3.1.0-py3-none-any.whl", hash = "sha256:632f5e38f93dfcb1ac8c4e09780b92af3a55f38f26e7c47ae85109d420b6ad39"}, - {file = "jsonlines-3.1.0.tar.gz", hash = "sha256:2579cb488d96f815b0eb81629e3e6b0332da0962a18fa3532958f7ba14a5c37f"}, -] -kenlm = [] -keras = [ - {file = "keras-2.10.0-py2.py3-none-any.whl", hash = "sha256:26a6e2c2522e7468ddea22710a99b3290493768fc08a39e75d1173a0e3452fdf"}, -] -keras-preprocessing = [ - {file = "Keras_Preprocessing-1.1.2-py2.py3-none-any.whl", hash = "sha256:7b82029b130ff61cc99b55f3bd27427df4838576838c5b2f65940e4fcec99a7b"}, - {file = "Keras_Preprocessing-1.1.2.tar.gz", hash = "sha256:add82567c50c8bc648c14195bf544a5ce7c1f76761536956c3d2978970179ef3"}, -] -kss = [ - {file = "kss-2.6.0-py3-none-any.whl", hash = 
"sha256:fedbdcd0bfc33111d7817866dd60346dab79f9f1ca5bab0026c4ee40e5941b0c"}, -] -libclang = [ - {file = "libclang-14.0.6-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:8791cf3c3b087c373a6d61e9199da7a541da922c9ddcfed1122090586b996d6e"}, - {file = "libclang-14.0.6-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b06fc76bd1e67c8b04b5719bf2ac5d6a323b289b245dfa9e468561d99538188"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:e429853939423f276a25140b0b702442d7da9a09e001c05e48df888336947614"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux2010_x86_64.whl", hash = "sha256:206d2789e4450a37d054e63b70451a6fc1873466397443fa13de2b3d4adb2796"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux2014_aarch64.whl", hash = "sha256:e2add1703129b2abe066fb1890afa880870a89fd6ab4ec5d2a7a8dc8d271677e"}, - {file = "libclang-14.0.6-py2.py3-none-manylinux2014_armv7l.whl", hash = "sha256:5dd3c6fca1b007d308a4114afa8e4e9d32f32b2572520701d45fcc626ac5cd6c"}, - {file = "libclang-14.0.6-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cfb0e892ebb5dff6bd498ab5778adb8581f26a00fd8347b3c76c989fe2fd04f7"}, - {file = "libclang-14.0.6-py2.py3-none-win_amd64.whl", hash = "sha256:ea03c12675151837660cdd5dce65bd89320896ac3421efef43a36678f113ce95"}, - {file = "libclang-14.0.6-py2.py3-none-win_arm64.whl", hash = "sha256:2e4303e04517fcd11173cb2e51a7070eed71e16ef45d4e26a82c5e881cac3d27"}, - {file = "libclang-14.0.6.tar.gz", hash = "sha256:9052a8284d8846984f6fa826b1d7460a66d3b23a486d782633b42b6e3b418789"}, -] -libcommon = [ - {file = "libcommon-0.5.0-py3-none-any.whl", hash = "sha256:0267504716992f562382ff5029ace87444fd12793f2393f3800921d384a0fd52"}, -] -librosa = [ - {file = "librosa-0.9.2-py3-none-any.whl", hash = "sha256:322a813e6d37af9fbc369e6a637dcf5fdc5c6925ce806a0d27c68de61a81350f"}, - {file = "librosa-0.9.2.tar.gz", hash = "sha256:5b576b5efdce428e90bc988bdd5a953d12a727e5f931f30d74c53b63abbe3c89"}, -] -llvmlite = [ - {file = "llvmlite-0.39.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6717c7a6e93c9d2c3d07c07113ec80ae24af45cde536b34363d4bcd9188091d9"}, - {file = "llvmlite-0.39.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ddab526c5a2c4ccb8c9ec4821fcea7606933dc53f510e2a6eebb45a418d3488a"}, - {file = "llvmlite-0.39.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3f331a323d0f0ada6b10d60182ef06c20a2f01be21699999d204c5750ffd0b4"}, - {file = "llvmlite-0.39.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c00ff204afa721b0bb9835b5bf1ba7fba210eefcec5552a9e05a63219ba0dc"}, - {file = "llvmlite-0.39.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16f56eb1eec3cda3a5c526bc3f63594fc24e0c8d219375afeb336f289764c6c7"}, - {file = "llvmlite-0.39.1-cp310-cp310-win32.whl", hash = "sha256:d0bfd18c324549c0fec2c5dc610fd024689de6f27c6cc67e4e24a07541d6e49b"}, - {file = "llvmlite-0.39.1-cp310-cp310-win_amd64.whl", hash = "sha256:7ebf1eb9badc2a397d4f6a6c8717447c81ac011db00064a00408bc83c923c0e4"}, - {file = "llvmlite-0.39.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6546bed4e02a1c3d53a22a0bced254b3b6894693318b16c16c8e43e29d6befb6"}, - {file = "llvmlite-0.39.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1578f5000fdce513712e99543c50e93758a954297575610f48cb1fd71b27c08a"}, - {file = "llvmlite-0.39.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3803f11ad5f6f6c3d2b545a303d68d9fabb1d50e06a8d6418e6fcd2d0df00959"}, - {file = 
"llvmlite-0.39.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50aea09a2b933dab7c9df92361b1844ad3145bfb8dd2deb9cd8b8917d59306fb"}, - {file = "llvmlite-0.39.1-cp37-cp37m-win32.whl", hash = "sha256:b1a0bbdb274fb683f993198775b957d29a6f07b45d184c571ef2a721ce4388cf"}, - {file = "llvmlite-0.39.1-cp37-cp37m-win_amd64.whl", hash = "sha256:e172c73fccf7d6db4bd6f7de963dedded900d1a5c6778733241d878ba613980e"}, - {file = "llvmlite-0.39.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e31f4b799d530255aaf0566e3da2df5bfc35d3cd9d6d5a3dcc251663656c27b1"}, - {file = "llvmlite-0.39.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62c0ea22e0b9dffb020601bb65cb11dd967a095a488be73f07d8867f4e327ca5"}, - {file = "llvmlite-0.39.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ffc84ade195abd4abcf0bd3b827b9140ae9ef90999429b9ea84d5df69c9058c"}, - {file = "llvmlite-0.39.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0f158e4708dda6367d21cf15afc58de4ebce979c7a1aa2f6b977aae737e2a54"}, - {file = "llvmlite-0.39.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22d36591cd5d02038912321d9ab8e4668e53ae2211da5523f454e992b5e13c36"}, - {file = "llvmlite-0.39.1-cp38-cp38-win32.whl", hash = "sha256:4c6ebace910410daf0bebda09c1859504fc2f33d122e9a971c4c349c89cca630"}, - {file = "llvmlite-0.39.1-cp38-cp38-win_amd64.whl", hash = "sha256:fb62fc7016b592435d3e3a8f680e3ea8897c3c9e62e6e6cc58011e7a4801439e"}, - {file = "llvmlite-0.39.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa9b26939ae553bf30a9f5c4c754db0fb2d2677327f2511e674aa2f5df941789"}, - {file = "llvmlite-0.39.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e4f212c018db951da3e1dc25c2651abc688221934739721f2dad5ff1dd5f90e7"}, - {file = "llvmlite-0.39.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39dc2160aed36e989610fc403487f11b8764b6650017ff367e45384dff88ffbf"}, - {file = "llvmlite-0.39.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ec3d70b3e507515936e475d9811305f52d049281eaa6c8273448a61c9b5b7e2"}, - {file = "llvmlite-0.39.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60f8dd1e76f47b3dbdee4b38d9189f3e020d22a173c00f930b52131001d801f9"}, - {file = "llvmlite-0.39.1-cp39-cp39-win32.whl", hash = "sha256:03aee0ccd81735696474dc4f8b6be60774892a2929d6c05d093d17392c237f32"}, - {file = "llvmlite-0.39.1-cp39-cp39-win_amd64.whl", hash = "sha256:3fc14e757bc07a919221f0cbaacb512704ce5774d7fcada793f1996d6bc75f2a"}, - {file = "llvmlite-0.39.1.tar.gz", hash = "sha256:b43abd7c82e805261c425d50335be9a6c4f84264e34d6d6e475207300005d572"}, -] -lm-dataformat = [ - {file = "lm_dataformat-0.0.20-py3-none-any.whl", hash = "sha256:247468181c9c2fea33a663cdb2f6fea489ddf6741d216fe6b466e60f002705af"}, - {file = "lm_dataformat-0.0.20.tar.gz", hash = "sha256:0016165b34d8f004753ac265348c3525532e55088f6c9c160f3597e660207145"}, -] -lockfile = [ - {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, - {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, -] -lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = 
"lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = 
"lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", 
hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, -] -markdown = [ - {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, - {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = 
"MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -marshmallow = [ - {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, - {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = 
"sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mongo-types = [ - {file = "mongo-types-0.15.1.tar.gz", hash = "sha256:0a9deeb7733ea7da5db3711d92e22d93556b522f860bbff82e5df44c53bd06a9"}, - {file = "mongo_types-0.15.1-py3-none-any.whl", hash = "sha256:9417ae5b9a759c09630b5ec7d66904cc333c2d2fcfe75e2760a332ed5e267309"}, -] -mongoengine = [ - {file = "mongoengine-0.24.2-py3-none-any.whl", hash = "sha256:f5c4e1b206b2ccffe4adc7a6283ed26dd799bd115a5fb1d2e885a075132cdb88"}, - {file = "mongoengine-0.24.2.tar.gz", hash = "sha256:c76d49658575bb995682e2e77c8ef7cda63faf939415b32ee923745d120f8b02"}, -] -msgpack = [ - {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"}, - {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"}, - {file = "msgpack-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db"}, - {file = "msgpack-1.0.4-cp310-cp310-win32.whl", hash = "sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef"}, - {file = "msgpack-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075"}, - {file = "msgpack-1.0.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae"}, - {file = "msgpack-1.0.4-cp36-cp36m-win32.whl", hash = 
"sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6"}, - {file = "msgpack-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661"}, - {file = "msgpack-1.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236"}, - {file = "msgpack-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44"}, - {file = "msgpack-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243"}, - {file = "msgpack-1.0.4-cp38-cp38-win32.whl", hash = "sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2"}, - {file = "msgpack-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55"}, - 
{file = "msgpack-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae"}, - {file = "msgpack-1.0.4-cp39-cp39-win32.whl", hash = "sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c"}, - {file = "msgpack-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce"}, - {file = "msgpack-1.0.4.tar.gz", hash = "sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f"}, -] -multidict = [ - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"}, - {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"}, - {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"}, - {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"}, - {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"}, - {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"}, - {file = 
"multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"}, - {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"}, - {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"}, - {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"}, - {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, - {file = "multidict-6.0.2.tar.gz", hash = 
"sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, -] -multiprocess = [ - {file = "multiprocess-0.70.9-cp27-cp27m-win32.whl", hash = "sha256:0e4e65c2e74aa14fa0c9a1f838b5e9a5f8fe5b3a173925792260843c4a6157ec"}, - {file = "multiprocess-0.70.9-cp27-cp27m-win_amd64.whl", hash = "sha256:1eb7dfe2d809d53be92e8a288ed1c01614fe5407bbc9d078ed451a749fb1bd34"}, - {file = "multiprocess-0.70.9.tar.gz", hash = "sha256:9fd5bd990132da77e73dec6e9613408602a4612e1d73caf2e2b813d2b61508e5"}, -] -multivolumefile = [ - {file = "multivolumefile-0.2.3-py3-none-any.whl", hash = "sha256:237f4353b60af1703087cf7725755a1f6fcaeeea48421e1896940cd1c920d678"}, - {file = "multivolumefile-0.2.3.tar.gz", hash = "sha256:a0648d0aafbc96e59198d5c17e9acad7eb531abea51035d08ce8060dcad709d6"}, -] -mypy = [ - {file = "mypy-0.812-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49"}, - {file = "mypy-0.812-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c"}, - {file = "mypy-0.812-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521"}, - {file = "mypy-0.812-cp35-cp35m-win_amd64.whl", hash = "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb"}, - {file = "mypy-0.812-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a"}, - {file = "mypy-0.812-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c"}, - {file = "mypy-0.812-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6"}, - {file = "mypy-0.812-cp36-cp36m-win_amd64.whl", hash = "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064"}, - {file = "mypy-0.812-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56"}, - {file = "mypy-0.812-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8"}, - {file = "mypy-0.812-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7"}, - {file = "mypy-0.812-cp37-cp37m-win_amd64.whl", hash = "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564"}, - {file = "mypy-0.812-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506"}, - {file = "mypy-0.812-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5"}, - {file = "mypy-0.812-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66"}, - {file = "mypy-0.812-cp38-cp38-win_amd64.whl", hash = "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e"}, - {file = "mypy-0.812-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a"}, - {file = "mypy-0.812-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a"}, - {file = "mypy-0.812-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97"}, - {file = "mypy-0.812-cp39-cp39-win_amd64.whl", hash = 
"sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df"}, - {file = "mypy-0.812-py3-none-any.whl", hash = "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4"}, - {file = "mypy-0.812.tar.gz", hash = "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -nlp = [ - {file = "nlp-0.4.0-py3-none-any.whl", hash = "sha256:a7335eb3939133d29dfefb507260b3b069bd7bcc662661ad026ff1404545a96c"}, - {file = "nlp-0.4.0.tar.gz", hash = "sha256:0aa6bc966ffc2d2be7248bd71f258360281cd717c10811e1b55bb2fa50bf79d4"}, -] -nltk = [ - {file = "nltk-3.7-py3-none-any.whl", hash = "sha256:ba3de02490308b248f9b94c8bc1ac0683e9aa2ec49ee78536d8667afb5e3eec8"}, - {file = "nltk-3.7.zip", hash = "sha256:d6507d6460cec76d70afea4242a226a7542f85c669177b9c7f562b7cf1b05502"}, -] -numba = [ - {file = "numba-0.56.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9f62672145f8669ec08762895fe85f4cf0ead08ce3164667f2b94b2f62ab23c3"}, - {file = "numba-0.56.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c602d015478b7958408d788ba00a50272649c5186ea8baa6cf71d4a1c761bba1"}, - {file = "numba-0.56.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:85dbaed7a05ff96492b69a8900c5ba605551afb9b27774f7f10511095451137c"}, - {file = "numba-0.56.4-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f4cfc3a19d1e26448032049c79fc60331b104f694cf570a9e94f4e2c9d0932bb"}, - {file = "numba-0.56.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4e08e203b163ace08bad500b0c16f6092b1eb34fd1fce4feaf31a67a3a5ecf3b"}, - {file = "numba-0.56.4-cp310-cp310-win32.whl", hash = "sha256:0611e6d3eebe4cb903f1a836ffdb2bda8d18482bcd0a0dcc56e79e2aa3fefef5"}, - {file = "numba-0.56.4-cp310-cp310-win_amd64.whl", hash = "sha256:fbfb45e7b297749029cb28694abf437a78695a100e7c2033983d69f0ba2698d4"}, - {file = "numba-0.56.4-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:3cb1a07a082a61df80a468f232e452d818f5ae254b40c26390054e4e868556e0"}, - {file = "numba-0.56.4-cp37-cp37m-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d69ad934e13c15684e7887100a8f5f0f61d7a8e57e0fd29d9993210089a5b531"}, - {file = "numba-0.56.4-cp37-cp37m-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:dbcc847bac2d225265d054993a7f910fda66e73d6662fe7156452cac0325b073"}, - {file = "numba-0.56.4-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8a95ca9cc77ea4571081f6594e08bd272b66060634b8324e99cd1843020364f9"}, - {file = "numba-0.56.4-cp37-cp37m-win32.whl", hash = "sha256:fcdf84ba3ed8124eb7234adfbb8792f311991cbf8aed1cad4b1b1a7ee08380c1"}, - {file = "numba-0.56.4-cp37-cp37m-win_amd64.whl", hash = "sha256:42f9e1be942b215df7e6cc9948cf9c15bb8170acc8286c063a9e57994ef82fd1"}, - {file = "numba-0.56.4-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:553da2ce74e8862e18a72a209ed3b6d2924403bdd0fb341fa891c6455545ba7c"}, - {file = "numba-0.56.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4373da9757049db7c90591e9ec55a2e97b2b36ba7ae3bf9c956a513374077470"}, - {file = "numba-0.56.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a993349b90569518739009d8f4b523dfedd7e0049e6838c0e17435c3e70dcc4"}, - {file = 
"numba-0.56.4-cp38-cp38-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:720886b852a2d62619ae3900fe71f1852c62db4f287d0c275a60219e1643fc04"}, - {file = "numba-0.56.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e64d338b504c9394a4a34942df4627e1e6cb07396ee3b49fe7b8d6420aa5104f"}, - {file = "numba-0.56.4-cp38-cp38-win32.whl", hash = "sha256:03fe94cd31e96185cce2fae005334a8cc712fc2ba7756e52dff8c9400718173f"}, - {file = "numba-0.56.4-cp38-cp38-win_amd64.whl", hash = "sha256:91f021145a8081f881996818474ef737800bcc613ffb1e618a655725a0f9e246"}, - {file = "numba-0.56.4-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:d0ae9270a7a5cc0ede63cd234b4ff1ce166c7a749b91dbbf45e0000c56d3eade"}, - {file = "numba-0.56.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c75e8a5f810ce80a0cfad6e74ee94f9fde9b40c81312949bf356b7304ef20740"}, - {file = "numba-0.56.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a12ef323c0f2101529d455cfde7f4135eaa147bad17afe10b48634f796d96abd"}, - {file = "numba-0.56.4-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:03634579d10a6129181129de293dd6b5eaabee86881369d24d63f8fe352dd6cb"}, - {file = "numba-0.56.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0240f9026b015e336069329839208ebd70ec34ae5bfbf402e4fcc8e06197528e"}, - {file = "numba-0.56.4-cp39-cp39-win32.whl", hash = "sha256:14dbbabf6ffcd96ee2ac827389afa59a70ffa9f089576500434c34abf9b054a4"}, - {file = "numba-0.56.4-cp39-cp39-win_amd64.whl", hash = "sha256:0da583c532cd72feefd8e551435747e0e0fbb3c0530357e6845fcc11e38d6aea"}, - {file = "numba-0.56.4.tar.gz", hash = "sha256:32d9fef412c81483d7efe0ceb6cf4d3310fde8b624a9cecca00f790573ac96ee"}, -] -numpy = [ - {file = "numpy-1.22.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9ead61dfb5d971d77b6c131a9dbee62294a932bf6a356e48c75ae684e635b3"}, - {file = "numpy-1.22.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1ce7ab2053e36c0a71e7a13a7475bd3b1f54750b4b433adc96313e127b870887"}, - {file = "numpy-1.22.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7228ad13744f63575b3a972d7ee4fd61815b2879998e70930d4ccf9ec721dce0"}, - {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43a8ca7391b626b4c4fe20aefe79fec683279e31e7c79716863b4b25021e0e74"}, - {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a911e317e8c826ea632205e63ed8507e0dc877dcdc49744584dfc363df9ca08c"}, - {file = "numpy-1.22.4-cp310-cp310-win32.whl", hash = "sha256:9ce7df0abeabe7fbd8ccbf343dc0db72f68549856b863ae3dd580255d009648e"}, - {file = "numpy-1.22.4-cp310-cp310-win_amd64.whl", hash = "sha256:3e1ffa4748168e1cc8d3cde93f006fe92b5421396221a02f2274aab6ac83b077"}, - {file = "numpy-1.22.4-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:59d55e634968b8f77d3fd674a3cf0b96e85147cd6556ec64ade018f27e9479e1"}, - {file = "numpy-1.22.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c1d937820db6e43bec43e8d016b9b3165dcb42892ea9f106c70fb13d430ffe72"}, - {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4c5d5eb2ec8da0b4f50c9a843393971f31f1d60be87e0fb0917a49133d257d6"}, - {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64f56fc53a2d18b1924abd15745e30d82a5782b2cab3429aceecc6875bd5add0"}, - {file = "numpy-1.22.4-cp38-cp38-win32.whl", hash = "sha256:fb7a980c81dd932381f8228a426df8aeb70d59bbcda2af075b627bbc50207cba"}, - {file = 
"numpy-1.22.4-cp38-cp38-win_amd64.whl", hash = "sha256:e96d7f3096a36c8754207ab89d4b3282ba7b49ea140e4973591852c77d09eb76"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:4c6036521f11a731ce0648f10c18ae66d7143865f19f7299943c985cdc95afb5"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b89bf9b94b3d624e7bb480344e91f68c1c6c75f026ed6755955117de00917a7c"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d487e06ecbf1dc2f18e7efce82ded4f705f4bd0cd02677ffccfb39e5c284c7e"}, - {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3eb268dbd5cfaffd9448113539e44e2dd1c5ca9ce25576f7c04a5453edc26fa"}, - {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37431a77ceb9307c28382c9773da9f306435135fae6b80b62a11c53cfedd8802"}, - {file = "numpy-1.22.4-cp39-cp39-win32.whl", hash = "sha256:cc7f00008eb7d3f2489fca6f334ec19ca63e31371be28fd5dad955b16ec285bd"}, - {file = "numpy-1.22.4-cp39-cp39-win_amd64.whl", hash = "sha256:f0725df166cf4785c0bc4cbfb320203182b1ecd30fee6e541c8752a92df6aa32"}, - {file = "numpy-1.22.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0791fbd1e43bf74b3502133207e378901272f3c156c4df4954cad833b1380207"}, - {file = "numpy-1.22.4.zip", hash = "sha256:425b390e4619f58d8526b3dcf656dde069133ae5c240229821f01b5f44ea07af"}, -] -oauthlib = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] -openpyxl = [ - {file = "openpyxl-3.0.10-py2.py3-none-any.whl", hash = "sha256:0ab6d25d01799f97a9464630abacbb34aafecdcaa0ef3cba6d6b3499867d0355"}, - {file = "openpyxl-3.0.10.tar.gz", hash = "sha256:e47805627aebcf860edb4edf7987b1309c1b3632f3750538ed962bbcc3bd7449"}, -] -opt-einsum = [ - {file = "opt_einsum-3.3.0-py3-none-any.whl", hash = "sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147"}, - {file = "opt_einsum-3.3.0.tar.gz", hash = "sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549"}, -] -orjson = [ - {file = "orjson-3.8.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:43e69b360c2851b45c7dbab3b95f7fa8469df73fab325a683f7389c4db63aa71"}, - {file = "orjson-3.8.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:64c5da5c9679ef3d85e9bbcbb62f4ccdc1f1975780caa20f2ec1e37b4da6bd36"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c632a2157fa9ec098d655287e9e44809615af99837c49f53d96bfbca453c5bd"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f63da6309c282a2b58d4a846f0717f6440356b4872838b9871dc843ed1fe2b38"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9be25c313ba2d5478829d949165445c3bd36c62e07092b4ba8dbe5426574d1"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4bcce53e9e088f82633f784f79551fcd7637943ab56c51654aaf9d4c1d5cfa54"}, - {file = "orjson-3.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:33edb5379c6e6337f9383c85fe4080ce3aa1057cc2ce29345b7239461f50cbd6"}, - {file = "orjson-3.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:da35d347115758bbc8bfaf39bb213c42000f2a54e3f504c84374041d20835cd6"}, - {file = 
"orjson-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d755d94a90a941b91b4d39a6b02e289d8ba358af2d1a911edf266be7942609dc"}, - {file = "orjson-3.8.2-cp310-none-win_amd64.whl", hash = "sha256:7ea96923e26390b2142602ebb030e2a4db9351134696e0b219e5106bddf9b48e"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:a0d89de876e6f1cef917a2338378a60a98584e1c2e1c67781e20b6ed1c512478"}, - {file = "orjson-3.8.2-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8d47e7592fe938aec898eb22ea4946298c018133df084bc78442ff18e2c6347c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d9f1043f618d0c64228aab9711e5bd822253c50b6c56223951e32b51f81d62"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed10600e8b08f1e87b656ad38ab316191ce94f2c9adec57035680c0dc9e93c81"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99c49e49a04bf61fee7aaea6d92ac2b1fcf6507aea894bbdf3fbb25fe792168c"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1463674f8efe6984902473d7b5ce3edf444c1fcd09dc8aa4779638a28fb9ca01"}, - {file = "orjson-3.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c1ef75f1d021d817e5c60a42da0b4b7e3123b1b37415260b8415666ddacc7cd7"}, - {file = "orjson-3.8.2-cp311-none-win_amd64.whl", hash = "sha256:b6007e1ac8564b13b2521720929e8bb3ccd3293d9fdf38f28728dcc06db6248f"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a02c13ae523221576b001071354380e277346722cc6b7fdaacb0fd6db5154b3e"}, - {file = "orjson-3.8.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fa2e565cf8ffdb37ce1887bd1592709ada7f701e61aa4b1e710be94b0aecbab4"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d8864288f7c5fccc07b43394f83b721ddc999f25dccfb5d0651671a76023f5"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1874c05d0bb994601fa2d51605cb910d09343c6ebd36e84a573293523fab772a"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349387ed6989e5db22e08c9af8d7ca14240803edc50de451d48d41a0e7be30f6"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4e42b19619d6e97e201053b865ca4e62a48da71165f4081508ada8e1b91c6a30"}, - {file = "orjson-3.8.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:bc112c17e607c59d1501e72afb44226fa53d947d364aed053f0c82d153e29616"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6fda669211f2ed1fc2c8130187ec90c96b4f77b6a250004e666d2ef8ed524e5f"}, - {file = "orjson-3.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aebd4e80fea0f20578fd0452908b9206a6a0d5ae9f5c99b6e665bbcd989e56cd"}, - {file = "orjson-3.8.2-cp37-none-win_amd64.whl", hash = "sha256:9f3cd0394eb6d265beb2a1572b5663bc910883ddbb5cdfbcb660f5a0444e7fd8"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:74e7d54d11b3da42558d69a23bf92c2c48fabf69b38432d5eee2c5b09cd4c433"}, - {file = "orjson-3.8.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8cbadc9be748a823f9c743c7631b1ee95d3925a9c0b21de4e862a1d57daa10ec"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a07d5a8c69a2947d9554a00302734fe3d8516415c8b280963c92bc1033477890"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b364ea01d1b71b9f97bf97af9eb79ebee892df302e127a9e2e4f8eaa74d6b98"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b98a8c825a59db94fbe8e0cce48618624c5a6fb1436467322d90667c08a0bf80"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ab63103f60b516c0fce9b62cb4773f689a82ab56e19ef2387b5a3182f80c0d78"}, - {file = "orjson-3.8.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:73ab3f4288389381ae33ab99f914423b69570c88d626d686764634d5e0eeb909"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ab3fd8728e12c36e20c6d9d70c9e15033374682ce5acb6ed6a08a80dacd254d"}, - {file = "orjson-3.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cde11822cf71a7f0daaa84223249b2696a2b6cda7fa587e9fd762dff1a8848e4"}, - {file = "orjson-3.8.2-cp38-none-win_amd64.whl", hash = "sha256:b14765ea5aabfeab1a194abfaa0be62c9fee6480a75ac8c6974b4eeede3340b4"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:6068a27d59d989d4f2864c2fc3440eb7126a0cfdfaf8a4ad136b0ffd932026ae"}, - {file = "orjson-3.8.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6bf36fa759a1b941fc552ad76b2d7fb10c1d2a20c056be291ea45eb6ae1da09b"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f436132e62e647880ca6988974c8e3165a091cb75cbed6c6fd93e931630c22fa"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ecd8936259a5920b52a99faf62d4efeb9f5e25a0aacf0cce1e9fa7c37af154f"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13114b345cda33644f64e92fe5d8737828766cf02fbbc7d28271a95ea546832"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6e43cdc3ddf96bdb751b748b1984b701125abacca8fc2226b808d203916e8cba"}, - {file = "orjson-3.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ee39071da2026b11e4352d6fc3608a7b27ee14bc699fd240f4e604770bc7a255"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1c3833976ebbeb3b5b6298cb22e23bf18453f6b80802103b7d08f7dd8a61611d"}, - {file = "orjson-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b9a34519d3d70935e1cd3797fbed8fbb6f61025182bea0140ca84d95b6f8fbe5"}, - {file = "orjson-3.8.2-cp39-none-win_amd64.whl", hash = "sha256:2734086d9a3dd9591c4be7d05aff9beccc086796d3f243685e56b7973ebac5bc"}, - {file = "orjson-3.8.2.tar.gz", hash = "sha256:a2fb95a45031ccf278e44341027b3035ab99caa32aa173279b1f0a06324f434b"}, -] -packageurl-python = [ - {file = "packageurl-python-0.10.4.tar.gz", hash = "sha256:5c91334f942cd55d45eb0c67dd339a535ef90e25f05b9ec016ad188ed0ef9048"}, - {file = "packageurl_python-0.10.4-py3-none-any.whl", hash = "sha256:bf8a1ffe755634776f6563904d792fb0aa13b377fc86115c36fe17f69b6e59db"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pandas = [ - {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9dbacd22555c2d47f262ef96bb4e30880e5956169741400af8b306bbb24a273"}, - {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:e2b83abd292194f350bb04e188f9379d36b8dfac24dd445d5c87575f3beaf789"}, - {file = "pandas-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2552bffc808641c6eb471e55aa6899fa002ac94e4eebfa9ec058649122db5824"}, - {file = "pandas-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc87eac0541a7d24648a001d553406f4256e744d92df1df8ebe41829a915028"}, - {file = "pandas-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d8fd58df5d17ddb8c72a5075d87cd80d71b542571b5f78178fb067fa4e9c72"}, - {file = "pandas-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:4aed257c7484d01c9a194d9a94758b37d3d751849c05a0050c087a358c41ad1f"}, - {file = "pandas-1.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:375262829c8c700c3e7cbb336810b94367b9c4889818bbd910d0ecb4e45dc261"}, - {file = "pandas-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc3cd122bea268998b79adebbb8343b735a5511ec14efb70a39e7acbc11ccbdc"}, - {file = "pandas-1.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4f5a82afa4f1ff482ab8ded2ae8a453a2cdfde2001567b3ca24a4c5c5ca0db3"}, - {file = "pandas-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8092a368d3eb7116e270525329a3e5c15ae796ccdf7ccb17839a73b4f5084a39"}, - {file = "pandas-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6257b314fc14958f8122779e5a1557517b0f8e500cfb2bd53fa1f75a8ad0af2"}, - {file = "pandas-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:82ae615826da838a8e5d4d630eb70c993ab8636f0eff13cb28aafc4291b632b5"}, - {file = "pandas-1.5.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:457d8c3d42314ff47cc2d6c54f8fc0d23954b47977b2caed09cd9635cb75388b"}, - {file = "pandas-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c009a92e81ce836212ce7aa98b219db7961a8b95999b97af566b8dc8c33e9519"}, - {file = "pandas-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:71f510b0efe1629bf2f7c0eadb1ff0b9cf611e87b73cd017e6b7d6adb40e2b3a"}, - {file = "pandas-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a40dd1e9f22e01e66ed534d6a965eb99546b41d4d52dbdb66565608fde48203f"}, - {file = "pandas-1.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae7e989f12628f41e804847a8cc2943d362440132919a69429d4dea1f164da0"}, - {file = "pandas-1.5.2-cp38-cp38-win32.whl", hash = "sha256:530948945e7b6c95e6fa7aa4be2be25764af53fba93fe76d912e35d1c9ee46f5"}, - {file = "pandas-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:73f219fdc1777cf3c45fde7f0708732ec6950dfc598afc50588d0d285fddaefc"}, - {file = "pandas-1.5.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9608000a5a45f663be6af5c70c3cbe634fa19243e720eb380c0d378666bc7702"}, - {file = "pandas-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:315e19a3e5c2ab47a67467fc0362cb36c7c60a93b6457f675d7d9615edad2ebe"}, - {file = "pandas-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e18bc3764cbb5e118be139b3b611bc3fbc5d3be42a7e827d1096f46087b395eb"}, - {file = "pandas-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0183cb04a057cc38fde5244909fca9826d5d57c4a5b7390c0cc3fa7acd9fa883"}, - {file = "pandas-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344021ed3e639e017b452aa8f5f6bf38a8806f5852e217a7594417fb9bbfa00e"}, - {file = "pandas-1.5.2-cp39-cp39-win32.whl", hash = "sha256:e7469271497960b6a781eaa930cba8af400dd59b62ec9ca2f4d31a19f2f91090"}, - {file = 
"pandas-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:c218796d59d5abd8780170c937b812c9637e84c32f8271bbf9845970f8c1351f"}, - {file = "pandas-1.5.2.tar.gz", hash = "sha256:220b98d15cee0b2cd839a6358bd1f273d0356bf964c1a1aeb32d47db0215488b"}, -] -pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, -] -pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, -] -pillow = [ - {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"}, - {file = "Pillow-9.3.0-1-cp37-cp37m-win_amd64.whl", hash = "sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de"}, - {file = "Pillow-9.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7"}, - {file = "Pillow-9.3.0-cp310-cp310-win32.whl", hash = "sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91"}, - {file = "Pillow-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = 
"sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c"}, - {file = "Pillow-9.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11"}, - {file = "Pillow-9.3.0-cp311-cp311-win32.whl", hash = "sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c"}, - {file = "Pillow-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef"}, - {file = "Pillow-9.3.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee"}, - {file = "Pillow-9.3.0-cp37-cp37m-win32.whl", hash = "sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29"}, - {file = "Pillow-9.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636"}, - {file = "Pillow-9.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32"}, - {file = "Pillow-9.3.0-cp38-cp38-win32.whl", hash = "sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0"}, - {file = "Pillow-9.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c"}, - {file = "Pillow-9.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448"}, - {file = "Pillow-9.3.0-cp39-cp39-win32.whl", hash = "sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48"}, - {file = "Pillow-9.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8"}, - {file = "Pillow-9.3.0.tar.gz", hash = "sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f"}, -] -pip = [ - {file = "pip-22.3.1-py3-none-any.whl", hash = "sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077"}, - {file = "pip-22.3.1.tar.gz", hash = "sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38"}, -] -pip-api = [ - {file = "pip-api-0.0.30.tar.gz", hash = "sha256:a05df2c7aa9b7157374bcf4273544201a0c7bae60a9c65bcf84f3959ef3896f3"}, - {file = "pip_api-0.0.30-py3-none-any.whl", hash = "sha256:2a0314bd31522eb9ffe8a99668b0d07fee34ebc537931e7b6483001dbedcbdc9"}, -] -pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = 
"sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, -] -pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, -] -platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -poetryup = [ - {file = "poetryup-0.3.15-py3-none-any.whl", hash = "sha256:db068f55d10c0f89c76ea2b62c6bb81c0b0512454f7a83bdc0a13c146e5fb13e"}, - {file = "poetryup-0.3.15.tar.gz", hash = "sha256:efa4e7bb0cd005db4aff3cc678c8bfba9474ef42d5759c0168f2a55fc0f17bc3"}, -] -pooch = [ - {file = "pooch-1.6.0-py3-none-any.whl", hash = "sha256:3bf0e20027096836b8dbce0152dbb785a269abeb621618eb4bdd275ff1e23c9c"}, - {file = "pooch-1.6.0.tar.gz", hash = "sha256:57d20ec4b10dd694d2b05bb64bc6b109c6e85a6c1405794ce87ed8b341ab3f44"}, -] -proto-plus = [ - {file = "proto-plus-1.22.1.tar.gz", hash = "sha256:6c7dfd122dfef8019ff654746be4f5b1d9c80bba787fe9611b508dd88be3a2fa"}, - {file = "proto_plus-1.22.1-py3-none-any.whl", hash = "sha256:ea8982669a23c379f74495bc48e3dcb47c822c484ce8ee1d1d7beb339d4e34c5"}, -] -protobuf = [ - {file = "protobuf-3.19.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:010be24d5a44be7b0613750ab40bc8b8cedc796db468eae6c779b395f50d1fa1"}, - {file = "protobuf-3.19.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11478547958c2dfea921920617eb457bc26867b0d1aa065ab05f35080c5d9eb6"}, - {file = "protobuf-3.19.6-cp310-cp310-win32.whl", hash = "sha256:559670e006e3173308c9254d63facb2c03865818f22204037ab76f7a0ff70b5f"}, - {file = "protobuf-3.19.6-cp310-cp310-win_amd64.whl", hash = "sha256:347b393d4dd06fb93a77620781e11c058b3b0a5289262f094379ada2920a3730"}, - {file = "protobuf-3.19.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a8ce5ae0de28b51dff886fb922012dad885e66176663950cb2344c0439ecb473"}, - {file = "protobuf-3.19.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b0d02163c4e67279ddb6dc25e063db0130fc299aefabb5d481053509fae5c8"}, - {file = "protobuf-3.19.6-cp36-cp36m-win32.whl", hash = "sha256:30f5370d50295b246eaa0296533403961f7e64b03ea12265d6dfce3a391d8992"}, - {file = "protobuf-3.19.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0c0714b025ec057b5a7600cb66ce7c693815f897cfda6d6efb58201c472e3437"}, - {file = "protobuf-3.19.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5057c64052a1f1dd7d4450e9aac25af6bf36cfbfb3a1cd89d16393a036c49157"}, - {file = "protobuf-3.19.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:bb6776bd18f01ffe9920e78e03a8676530a5d6c5911934c6a1ac6eb78973ecb6"}, - {file = "protobuf-3.19.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a04134866861b11556a82dd91ea6daf1f4925746b992f277b84013a7cc1229"}, - {file = "protobuf-3.19.6-cp37-cp37m-win32.whl", hash = "sha256:4bc98de3cdccfb5cd769620d5785b92c662b6bfad03a202b83799b6ed3fa1fa7"}, - {file = 
"protobuf-3.19.6-cp37-cp37m-win_amd64.whl", hash = "sha256:aa3b82ca1f24ab5326dcf4ea00fcbda703e986b22f3d27541654f749564d778b"}, - {file = "protobuf-3.19.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2b2d2913bcda0e0ec9a784d194bc490f5dc3d9d71d322d070b11a0ade32ff6ba"}, - {file = "protobuf-3.19.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d0b635cefebd7a8a0f92020562dead912f81f401af7e71f16bf9506ff3bdbb38"}, - {file = "protobuf-3.19.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a552af4dc34793803f4e735aabe97ffc45962dfd3a237bdde242bff5a3de684"}, - {file = "protobuf-3.19.6-cp38-cp38-win32.whl", hash = "sha256:0469bc66160180165e4e29de7f445e57a34ab68f49357392c5b2f54c656ab25e"}, - {file = "protobuf-3.19.6-cp38-cp38-win_amd64.whl", hash = "sha256:91d5f1e139ff92c37e0ff07f391101df77e55ebb97f46bbc1535298d72019462"}, - {file = "protobuf-3.19.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c0ccd3f940fe7f3b35a261b1dd1b4fc850c8fde9f74207015431f174be5976b3"}, - {file = "protobuf-3.19.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:30a15015d86b9c3b8d6bf78d5b8c7749f2512c29f168ca259c9d7727604d0e39"}, - {file = "protobuf-3.19.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:878b4cd080a21ddda6ac6d1e163403ec6eea2e206cf225982ae04567d39be7b0"}, - {file = "protobuf-3.19.6-cp39-cp39-win32.whl", hash = "sha256:5a0d7539a1b1fb7e76bf5faa0b44b30f812758e989e59c40f77a7dab320e79b9"}, - {file = "protobuf-3.19.6-cp39-cp39-win_amd64.whl", hash = "sha256:bbf5cea5048272e1c60d235c7bd12ce1b14b8a16e76917f371c718bd3005f045"}, - {file = "protobuf-3.19.6-py2.py3-none-any.whl", hash = "sha256:14082457dc02be946f60b15aad35e9f5c69e738f80ebbc0900a19bc83734a5a4"}, - {file = "protobuf-3.19.6.tar.gz", hash = "sha256:5f5540d57a43042389e87661c6eaa50f47c19c6176e8cf1c4f287aeefeccb5c4"}, -] -psutil = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = 
"psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, -] -py7zr = [ - {file = "py7zr-0.17.4-py3-none-any.whl", hash = "sha256:69489b15f6ed1fdee1380092541f02fba193ea8fb5a854bc6ff9cd78cce3440d"}, - {file = "py7zr-0.17.4.tar.gz", hash = "sha256:1df67edaa8dd1613fc5a7de3354322e7bc75d989d6069924ce2d08bb7fabdd19"}, -] -pyarrow = [ - {file = "pyarrow-7.0.0-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:0f15213f380539c9640cb2413dc677b55e70f04c9e98cfc2e1d8b36c770e1036"}, - {file = "pyarrow-7.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:29c4e3b3be0b94d07ff4921a5e410fc690a3a066a850a302fc504de5fc638495"}, - {file = "pyarrow-7.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8a9bfc8a016bcb8f9a8536d2fa14a890b340bc7a236275cd60fd4fb8b93ff405"}, - {file = "pyarrow-7.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:49d431ed644a3e8f53ae2bbf4b514743570b495b5829548db51610534b6eeee7"}, - {file = "pyarrow-7.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa6442a321c1e49480b3d436f7d631c895048a16df572cf71c23c6b53c45ed66"}, - {file = "pyarrow-7.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b01a23cb401750092c6f7c4dcae67cd8fd6b99ae710e26f654f23508f25f25"}, - {file = "pyarrow-7.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f10928745c6ff66e121552731409803bed86c66ac79c64c90438b053b5242c5"}, - {file = "pyarrow-7.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:759090caa1474cafb5e68c93a9bd6cb45d8bb8e4f2cad2f1a0cc9439bae8ae88"}, - {file = "pyarrow-7.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e3fe34bcfc28d9c4a747adc3926d2307a04c5c50b89155946739515ccfe5eab0"}, - {file = "pyarrow-7.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:040dce5345603e4e621bcf4f3b21f18d557852e7b15307e559bb14c8951c8714"}, - {file = "pyarrow-7.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ed4b647c3345ae3463d341a9d28d0260cd302fb92ecf4e2e3e0f1656d6e0e55c"}, - {file = "pyarrow-7.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7fecd5d5604f47e003f50887a42aee06cb8b7bf8e8bf7dc543a22331d9ba832"}, - {file = "pyarrow-7.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f2d00b892fe865e43346acb78761ba268f8bb1cbdba588816590abcb780ee3d"}, - {file = "pyarrow-7.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f439f7d77201681fd31391d189aa6b1322d27c9311a8f2fce7d23972471b02b6"}, - {file = "pyarrow-7.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:3e06b0e29ce1e32f219c670c6b31c33d25a5b8e29c7828f873373aab78bf30a5"}, - {file = "pyarrow-7.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:13dc05bcf79dbc1bd2de1b05d26eb64824b85883d019d81ca3c2eca9b68b5a44"}, - {file = "pyarrow-7.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:06183a7ff2b0c030ec0413fc4dc98abad8cf336c78c280a0b7f4bcbebb78d125"}, - {file = "pyarrow-7.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:702c5a9f960b56d03569eaaca2c1a05e8728f05ea1a2138ef64234aa53cd5884"}, - {file = "pyarrow-7.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c7313038203df77ec4092d6363dbc0945071caa72635f365f2b1ae0dd7469865"}, - {file = "pyarrow-7.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e87d1f7dc7a0b2ecaeb0c7a883a85710f5b5626d4134454f905571c04bc73d5a"}, - {file = "pyarrow-7.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:ba69488ae25c7fde1a2ae9ea29daf04d676de8960ffd6f82e1e13ca945bb5861"}, - {file = "pyarrow-7.0.0-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:11a591f11d2697c751261c9d57e6e5b0d38fdc7f0cc57f4fd6edc657da7737df"}, - {file = "pyarrow-7.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:6183c700877852dc0f8a76d4c0c2ffd803ba459e2b4a452e355c2d58d48cf39f"}, - {file = "pyarrow-7.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1748154714b543e6ae8452a68d4af85caf5298296a7e5d4d00f1b3021838ac6"}, - {file = "pyarrow-7.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcc8f934c7847a88f13ec35feecffb61fe63bb7a3078bd98dd353762e969ce60"}, - {file = "pyarrow-7.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:759f59ac77b84878dbd54d06cf6df74ff781b8e7cf9313eeffbb5ec97b94385c"}, - {file = "pyarrow-7.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3e3f93ac2993df9c5e1922eab7bdea047b9da918a74e52145399bc1f0099a3"}, - {file = "pyarrow-7.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:306120af554e7e137895254a3b4741fad682875a5f6403509cd276de3fe5b844"}, - {file = "pyarrow-7.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:087769dac6e567d58d59b94c4f866b3356c00d3db5b261387ece47e7324c2150"}, - {file = "pyarrow-7.0.0.tar.gz", hash = "sha256:da656cad3c23a2ebb6a307ab01d35fce22f7850059cffafcb90d12590f8f4f38"}, -] -pyasn1 = [ - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, -] -pyasn1-modules = [ - {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, - {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, -] -pybcj = [ - {file = "pybcj-1.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20fc0d8f67e2d9747e0c31082d5f64b112258ae602a85aa5c7e6bf5a7cad287b"}, - {file = "pybcj-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43e8bc75773ca06ee7a64602b799613171e4edf4d9d8fd38fa5c49f1cdbb4407"}, - {file = "pybcj-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a81f14f213a75597f9be44feb97740a51adda558465fb159114472dc2ab39ef8"}, - {file = "pybcj-1.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:225a0addf4b3d580bf4eae583b5168dac0125a703c53ded8b3f120882e1e0312"}, - {file = "pybcj-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc79ed4773cd35328377a8fedbbdcafb3a9d242ee63b96863c0692c81faefab8"}, - {file = "pybcj-1.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eaa90639992b6096afb1485380fae7f084483db6b92867847a3bfdf22cc4efc"}, - {file = "pybcj-1.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:570a3cf4e016dcb0fc561991833e5170a2a0bc6ee88fe5667591f356bd7b7895"}, - {file = "pybcj-1.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:46b82fe50eb8171ee2205e935f3fd5900e31beb5e54e10c88f23a5420902467d"}, - {file = 
"pybcj-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d6b34ec233fcf5a83ccfbf422fef22256947eaa7077aaa012e5961d15aa302c"}, - {file = "pybcj-1.0.1-cp310-cp310-win32.whl", hash = "sha256:fa787b414c4dc6b6cd75338fac18a7dbb53a09443dd863020a2d2bda76940ca6"}, - {file = "pybcj-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:970dc23ca1c64611d35a3abe76a059cf551da53d62faefd84c5bf3e0af1602d1"}, - {file = "pybcj-1.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c854a206d8c3a5a959b803405760f3627bb4878450e2f36b5d35af09c89152fc"}, - {file = "pybcj-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21098001200273c3c9fd90e7bf909fb905a8e1c102c80b604cb7c6a3103ef7e0"}, - {file = "pybcj-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:39dd836134e261ec769cd5aa9ae7a3a330a7dac81efb66eb5504643abd8235df"}, - {file = "pybcj-1.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acfc4a02ddf22f6df7184441b39f38c31e95aa8af41de4d2f825821ab1fb85c6"}, - {file = "pybcj-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4428b6808d781f4b605a27f53fc10a3ca343d1cd901c691b9ba2e4ed85a5fc7"}, - {file = "pybcj-1.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74d34340323996b70dbd73e9530cca71c05ff7c97e30fe4d32aeea2f877836ca"}, - {file = "pybcj-1.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bf87f2a7f827656bc6e1d9888d47931aa0ae35cdc4ff33b1cec70d8d462590b3"}, - {file = "pybcj-1.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e6a74cb618da93ac1322d6a548a4508e76eb4c388ed1c80560bc25d8764cf272"}, - {file = "pybcj-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f472da992a6ba58381c0314b994c01d20e522ff8836417ef1c0975bdae142406"}, - {file = "pybcj-1.0.1-cp311-cp311-win32.whl", hash = "sha256:f58e489e43c9a1688c7d5ceb7455b44952d87f183b7b9c915b301478a2b3bfbe"}, - {file = "pybcj-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:a74e70bf3fd50a413fdce4264e037b8e8f34cb8d9207ac364167b6eb076c14ec"}, - {file = "pybcj-1.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8e846a8272bf02202794fe22beaf389ed27c2d8ebf59aafb43af4935feac0389"}, - {file = "pybcj-1.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421f211fb15aeb836b4ba61174cb409fc82222ab3b2486deb4953ae863e6507b"}, - {file = "pybcj-1.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdabbe7fd66886943393ecf98318d7801dd40183af80314acd4464bccdd44d53"}, - {file = "pybcj-1.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:731800acfc6112132aa2b7d08f9d6fe49a0c0071b30985809d084e238af98dac"}, - {file = "pybcj-1.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:09872b32edad4e3653d5b357b244d267ca58fe52d4e1dd3cdff816d3bb9d9f7c"}, - {file = "pybcj-1.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5de90f8b6c7fc1d28dbe74c29b1d5053a7a8703cbc2c6f4f112907ffd7529f8e"}, - {file = "pybcj-1.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:10961ea10ae930b9348132707b9dd3cf3e71a41ef1df7656fbc4f14a71f10747"}, - {file = "pybcj-1.0.1-cp36-cp36m-win32.whl", hash = "sha256:6f589af70286ec6565e3415145a03abc3c14a23ed7ed198ac741de81af332f26"}, - {file = "pybcj-1.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b901f12380e988da07f21bb6b75da7f91fd9feffb43fcf70fad698e40a2ef3a7"}, - {file = "pybcj-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:2e1859d36c073231737956fbeb5bbcfa8dba880e1b66bfbd001466718d6d89dc"}, - {file = "pybcj-1.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421ed75e54ebecd79c80178c1df5bdbe1e0e3e10e7efef5f011b5f0be6a9a12f"}, - {file = "pybcj-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872697d8bff2572e4225ed8cbce17be338faac28ec1ab3c00419aaef2f56dd3c"}, - {file = "pybcj-1.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc23f2ac2c1ded250f1aa66fbd1a3d823f76de549978b61eed4fb34affc11338"}, - {file = "pybcj-1.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8efed581f2ee74f1e0ec04a10e97881b93abc258d13b15ef966aee71732ac152"}, - {file = "pybcj-1.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb378b0f133e19d437eca4327bac7c3f38e30950c5c604092c72b18cba839bc2"}, - {file = "pybcj-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:534b8b253dbdb746c06bab28383db31d7e2b42aa9b33ed4e7836319622dcd75b"}, - {file = "pybcj-1.0.1-cp37-cp37m-win32.whl", hash = "sha256:15edd1786617127ecfda4274bbb04f09ae299c474ada86e369bcf050d5cb88dd"}, - {file = "pybcj-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:393d95f83e47976d137bcec7b66986f51282dcb2091933f88983dd7eb89e59c4"}, - {file = "pybcj-1.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e89a814f1727be7d543ac6910f0d94131f43a337e811ab684606d42dbc22b701"}, - {file = "pybcj-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b3861996b06b8238f799b4f1bd9542d1a8ae8e4765adbdde25ed011c3bda11df"}, - {file = "pybcj-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7801ee9a9fcd47b92d4d90ff9a28cfdc23195cad72bd8032938ab3c794942b43"}, - {file = "pybcj-1.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10182725b0e6aa944d13a10a4a9cb5208bafe0016b4326253340948153de4bc0"}, - {file = "pybcj-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fc313b1a5547c5416982853f2de1454980704f3ab3dbcad18dacdc565a2eafc"}, - {file = "pybcj-1.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b3773a77ae3b18778c9bf22c7ba6478a0e5416f84b7d2ac6d764001f6d0d985"}, - {file = "pybcj-1.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c72ff262613c9a6f20e80bcf1e8bbc000b78b95a7fa301164ab3e3bd23bd936c"}, - {file = "pybcj-1.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:262f53e27bca6096e3424c63e5e59948b10985eee4b03a5d70c3f3f6161a79e7"}, - {file = "pybcj-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:efe75e3b8768c4f9d454d3c1b2b2a67e757f2b00d638146d3a4cddb38460fc3a"}, - {file = "pybcj-1.0.1-cp38-cp38-win32.whl", hash = "sha256:a77796b4c5370cedd4fad2264b6d7a78cb40229c7fa3cbcab24df3adea768962"}, - {file = "pybcj-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:4d10dd75fad48555e9530c5565c7ccf13754adad2fe331feefb263055cdca7b3"}, - {file = "pybcj-1.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1c0e1657c233f9f4070ab578951e03d569f1b645042ce661341091f50e41b541"}, - {file = "pybcj-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:603daa737579cf69efb368fab716cdce18d0b2615af77bb623f5f42aa546b3d8"}, - {file = "pybcj-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df75707f466ab6fa086f164bff2df75fd16543c8d43ca43a268f938c1144e792"}, - {file = "pybcj-1.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda423836d7d69cba6a6f99e7a34c2e5fe3621e5e945cd25ea9ba60a96223254"}, - {file = 
"pybcj-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3847387b43af47d9677952b8a22d9c2d8a544c2175b6d5304c200669c05d39e1"}, - {file = "pybcj-1.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b99f4291e59dcbe548be5a1e8c6a1a19a860184526c2d14fc374ec687b98ad7d"}, - {file = "pybcj-1.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:05fad9a905772774aacc96cb174571ac1f5afa80b9f54c6ec414d369865d305c"}, - {file = "pybcj-1.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d5c4ca6faff0af4b5f3e7d88d13ec76f8cac36c9bcc814b8c84d9f3f951b2cf9"}, - {file = "pybcj-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bc8720f3a224c27bd413a930b9bec5f225fda050641258967b1ebb252a053fb"}, - {file = "pybcj-1.0.1-cp39-cp39-win32.whl", hash = "sha256:d61f287f820787d3acf60d113c5ce6e506870d9d3103bc37a74373e72ce9d7a6"}, - {file = "pybcj-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:bbb49772fc3896850a704215160df8316db89e5e8876b2d8af6c6c15b4e0f6ea"}, - {file = "pybcj-1.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c1e02170f8d358a8ddc716606760c73d55eea6bdb0cca2d97b86447e9524708b"}, - {file = "pybcj-1.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc1684b9f7ec92d2ae94a137ec311bd2227f684429521061af7ceed4952c7f72"}, - {file = "pybcj-1.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e6434a46f852cd3e6929633b43537887bd381bc614dbf5c4a128fdde4966b3a"}, - {file = "pybcj-1.0.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:795dff9229dc024e54bd0f618f5a3adb269ee0cccd7ac9a0bef29df388beed23"}, - {file = "pybcj-1.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:358dba3dc39a07cded6897b9f99bb5b951a0ad95d567eda535b44861caa02f5b"}, - {file = "pybcj-1.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6df9eccc99a0d7bc091b58cff2f507b89f076d657253975fa2ca9eb42dbb4733"}, - {file = "pybcj-1.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f46ba61c942ee64198444c9562c5cf089eaf97f17b413e15fa1c0614df304734"}, - {file = "pybcj-1.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f950ca403ffaa808a017e40e3371115bcb0b4b1061772b03e7d842555132ac"}, - {file = "pybcj-1.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6485c6b091504c0e6431a9495309271626eaa9ecb23276903486824f94f4c551"}, - {file = "pybcj-1.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:9b56eeff51efa556ecc186260ac486a4ddd79ad37bc88d669e96c45190f3c0da"}, - {file = "pybcj-1.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d5b327df02761c42399c878cd6c37f885bf0639befbd4d1ab763cd44ba1e0552"}, - {file = "pybcj-1.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:023082fd677f67ebd36fe96322a4a45ac33a2b340d49010d88e1867c76744c50"}, - {file = "pybcj-1.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8576a1dcf445ef064bf8c3b2cdc1d6353e41cb4b366329946883e285dcbcec0"}, - {file = "pybcj-1.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a5365edcaa82dc47e7757ba2efb48f96b9b352e3811a2aaa90084802479ddbe"}, - {file = "pybcj-1.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6ca6ddae1302477879099d4c4efc65790f4610d71ceff7fbe8f8b60f6ac6dcff"}, - {file 
= "pybcj-1.0.1.tar.gz", hash = "sha256:8b682ed08caabfb7c042d4be083e28ddc692afb1deff5567111f8855071b75c3"}, -] -pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -pycryptodomex = [ - {file = "pycryptodomex-3.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6f5b6ba8aefd624834bc177a2ac292734996bb030f9d1b388e7504103b6fcddf"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:4540904c09704b6f831059c0dfb38584acb82cb97b0125cd52688c1f1e3fffa6"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0fadb9f7fa3150577800eef35f62a8a24b9ddf1563ff060d9bd3af22d3952c8c"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fc9bc7a9b79fe5c750fc81a307052f8daabb709bdaabb0fb18fb136b66b653b5"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f8be976cec59b11f011f790b88aca67b4ea2bd286578d0bd3e31bcd19afcd3e4"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:78d9621cf0ea35abf2d38fa2ca6d0634eab6c991a78373498ab149953787e5e5"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-win32.whl", hash = "sha256:b6306403228edde6e289f626a3908a2f7f67c344e712cf7c0a508bab3ad9e381"}, - {file = "pycryptodomex-3.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:48697790203909fab02a33226fda546604f4e2653f9d47bc5d3eb40879fa7c64"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:18e2ab4813883ae63396c0ffe50b13554b32bb69ec56f0afaf052e7a7ae0d55b"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3709f13ca3852b0b07fc04a2c03b379189232b24007c466be0f605dd4723e9d4"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:191e73bc84a8064ad1874dba0ebadedd7cce4dedee998549518f2c74a003b2e1"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e3164a18348bd53c69b4435ebfb4ac8a4076291ffa2a70b54f0c4b80c7834b1d"}, - {file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:5676a132169a1c1a3712edf25250722ebc8c9102aa9abd814df063ca8362454f"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:e2b12968522a0358b8917fc7b28865acac002f02f4c4c6020fcb264d76bfd06d"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux1_i686.whl", hash = "sha256:e47bf8776a7e15576887f04314f5228c6527b99946e6638cf2f16da56d260cab"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:996e1ba717077ce1e6d4849af7a1426f38b07b3d173b879e27d5e26d2e958beb"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:65204412d0c6a8e3c41e21e93a5e6054a74fea501afa03046a388cf042e3377a"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:dd452a5af7014e866206d41751886c9b4bf379a339fdf2dbfc7dd16c0fb4f8e0"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:b9279adc16e4b0f590ceff581f53a80179b02cba9056010d733eb4196134a870"}, - {file = 
"pycryptodomex-3.15.0-cp35-abi3-win32.whl", hash = "sha256:46b3f05f2f7ac7841053da4e0f69616929ca3c42f238c405f6c3df7759ad2780"}, - {file = "pycryptodomex-3.15.0-cp35-abi3-win_amd64.whl", hash = "sha256:8eecdf9cdc7343001d047f951b9cc805cd68cb6cd77b20ea46af5bffc5bd3dfb"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:67e1e6a92151023ccdfcfbc0afb3314ad30080793b4c27956ea06ab1fb9bcd8a"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:c4cb9cb492ea7dcdf222a8d19a1d09002798ea516aeae8877245206d27326d86"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:94c7b60e1f52e1a87715571327baea0733708ab4723346598beca4a3b6879794"}, - {file = "pycryptodomex-3.15.0-pp27-pypy_73-win32.whl", hash = "sha256:04cc393045a8f19dd110c975e30f38ed7ab3faf21ede415ea67afebd95a22380"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0776bfaf2c48154ab54ea45392847c1283d2fcf64e232e85565f858baedfc1fa"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:463119d7d22d0fc04a0f9122e9d3e6121c6648bcb12a052b51bd1eed1b996aa2"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a07a64709e366c2041cd5cfbca592b43998bf4df88f7b0ca73dca37071ccf1bd"}, - {file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:35a8f7afe1867118330e2e0e0bf759c409e28557fb1fc2fbb1c6c937297dbe9a"}, - {file = "pycryptodomex-3.15.0.tar.gz", hash = "sha256:7341f1bb2dadb0d1a0047f34c3a58208a92423cdbd3244d998e4b28df5eac0ed"}, -] -pydot = [ - {file = "pydot-1.4.2-py2.py3-none-any.whl", hash = "sha256:66c98190c65b8d2e2382a441b4c0edfdb4f4c025ef9cb9874de478fb0793a451"}, - {file = "pydot-1.4.2.tar.gz", hash = "sha256:248081a39bcb56784deb018977e428605c1c758f10897a339fce1dd728ff007d"}, -] -pydub = [ - {file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"}, - {file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"}, -] -pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] -pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pyicu = [ - {file = "PyICU-2.10.2.tar.gz", hash = "sha256:0c3309eea7fab6857507ace62403515b60fe096cbfb4f90d14f55ff75c5441c1"}, -] -pymongo = [ - {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, - {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, - {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, - {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, - {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, - {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, - {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, - {file = 
"pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, - {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, - {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, - {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, - {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, - {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, - {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, - {file = 
"pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, - {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, - {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, - {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, - {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = 
"sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, - {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, - {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, - {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, - {file = "pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, - {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, 
- {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, - {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, - {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, - {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyppmd = [ - {file = "pyppmd-1.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8049c19af4b78b400b2347bff4514763257b55516c359144e9d8091991ed12e8"}, - {file = "pyppmd-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1c0fd06aaf782e65b7b5bbc47f8a9dbe050c1ba18474ccbe0a2b37f57a8d8c72"}, - {file = "pyppmd-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e17b08a5c283faf48b4ee888f8fa53f919cd8afd0930eae4d59f719f6be519fb"}, - {file = "pyppmd-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71f994f281439705cb04c497adc2863551fa5813606af6fb26c673a44a36c4e3"}, - {file = "pyppmd-1.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31a09fd1b10518342ff442b57dd8c890b9bfea6bbdbb785c729f0d139092e42e"}, - {file = "pyppmd-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca6a926d229a6dbf2ccdb0d4e692d81ff927459b59a1cec14ef522522df6d757"}, - {file = "pyppmd-1.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6279f1c4b6aefacb95df49db2f2e232530592d1849c37b73478a4f26eb405d12"}, - {file = "pyppmd-1.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f7a1b08612627d5280ef2dad1fadb0b1a10c70df0c484f9091eff5fab5e4c84e"}, - {file = 
"pyppmd-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ecb83e0cc92960f959111518ea208b51a58e8cc303ff959e9cd2cc56dd36a63"}, - {file = "pyppmd-1.0.0-cp310-cp310-win32.whl", hash = "sha256:703c4fbc9b5e1454f403fb1d6b4a6c4c729f72eef14690146deecd2166429d6d"}, - {file = "pyppmd-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:6e2f5ff5071e4e43c92065f383753d4ad59778816485a01ee7b29e2a1ff48140"}, - {file = "pyppmd-1.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7e8d3c309061ae7fb40e4a26d30f8982b367abc562b9b8621cb79932cb3b94d9"}, - {file = "pyppmd-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5805c73590fb8f0ceb3e6cb115774b66a6f4700ae84b31d962ad69667e05dfbd"}, - {file = "pyppmd-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6a0c524be57698fe61fff893d485a9af21e6bc0aa2d385b71a63ff951921d4b6"}, - {file = "pyppmd-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18d7cf4d0a9ced96ff1fa44de9ee3d65f5b06278c8f9a61c3edeb660f12f146b"}, - {file = "pyppmd-1.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61acfeee5ed59796037499119edd3159bf6b8c5fcaef17e295a2ca4103112d60"}, - {file = "pyppmd-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8dbe3076fe20c4d65cb1d1b51eeb17a1c177402b83100017a55daad888e198e"}, - {file = "pyppmd-1.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3bc75ed4e969b09fd1a766dd79cb3d5efe56edc11c86ac0b357b5648c7181ce2"}, - {file = "pyppmd-1.0.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:024f714ebb8ddf59dae164adc3c220c24555d470f4adb5bd022abc50298cfff3"}, - {file = "pyppmd-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ae419f71afa88784d53dd2449882af982bbd0328fa22a7e6a339221f3143918"}, - {file = "pyppmd-1.0.0-cp311-cp311-win32.whl", hash = "sha256:8680008b1b1e9e77f3337a1a53c1b32541cac9f93f79ae12d34de050585999ac"}, - {file = "pyppmd-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5fbec6f39a307818593508d8623d9328baf494137d191fc98e11f47e058ceee"}, - {file = "pyppmd-1.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a7240c00083527cf0b1bbdc92f6967e522efb9ad6968c953be174c390b091b3e"}, - {file = "pyppmd-1.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfb716a4a07ccbef84ed9fc31d012cef3b38404a6510e24d307cf64025999b21"}, - {file = "pyppmd-1.0.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12be01e919a34c6944568592b35451acf7c98ed18e005bb4b1c046ed520aff7f"}, - {file = "pyppmd-1.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d90d87377d83d909eafbf23301057fe16e6662c98ffea738159a234d9000a68"}, - {file = "pyppmd-1.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:12a783a0e3c76484a1bc93783867a36ab9a60de5b5298d57c9fe7348e848346e"}, - {file = "pyppmd-1.0.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b6b6c01e46fcf785ad6c272be400ebcbcb434a1d91150614e10de8cc569b8bff"}, - {file = "pyppmd-1.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:385a0b1341ebdfd7cb101c43eea130546830073c01bdb5036bca45c033ee633e"}, - {file = "pyppmd-1.0.0-cp36-cp36m-win32.whl", hash = "sha256:b8eee08c615ae9edd7bf1f214a377cac3d27417f22112685e581d4bab43029b0"}, - {file = "pyppmd-1.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:0e9c001719527dbafdd7fd8709b98bd63c173451c2eddbaa77abf62486a13da0"}, - {file = "pyppmd-1.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5847c47127ff9ea323f5910c62b9f136c3fab181a5144bfe72be13f051047357"}, - {file = 
"pyppmd-1.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63ddd5a81d6aaed9373cd9fc4de9529f10fa052aaf064ab283dc6218418cc5b5"}, - {file = "pyppmd-1.0.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:706d33cec3601d894f8a4a158bc652b7a3f01cd9e92c2da5d8711efeb9755835"}, - {file = "pyppmd-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e067e114f05918c8a4ab1fa6a070e2c7a9e497aa73fbf6d87a90e7a6e62a57"}, - {file = "pyppmd-1.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cecf0859b461bcf04439f32bcfb6e081016fa6204c92b5950d19d248fd1aad6b"}, - {file = "pyppmd-1.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:47ea218f7dfa94d15286c25d60db3091db1082ba958fa0a32ccaaaeaca7fc712"}, - {file = "pyppmd-1.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f751882dd529328ca43af8018f79cdd02ed707fcda30a2fa9acb1ee5c48261a6"}, - {file = "pyppmd-1.0.0-cp37-cp37m-win32.whl", hash = "sha256:18f863d58c4451e00765137be731c2b2150aff829468f59de4169e052429e1fd"}, - {file = "pyppmd-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05950d8a39fd9bf6c64572d69a6dd0a1af3fadf8d4a2a0bb62f5b04c0a618300"}, - {file = "pyppmd-1.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5331a7780d3444d7029e15e68385c94d6a26f688c1e87a5a9ee2e836ea6e4559"}, - {file = "pyppmd-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:62f970173baf80aad9472c7c6edca4a021ae7965174b1c5d6f400c9571e92efc"}, - {file = "pyppmd-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ccdfc8b2a1d73b2186850b9a5bd96106d5fd4419a620d344b0ab8bf630680cf8"}, - {file = "pyppmd-1.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63adeeb9dc4afd6d377ac1c9801f9539f9a81430e9c96d332023bf2ad6c04a1"}, - {file = "pyppmd-1.0.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca8a842b4ff671642b63ed4edd4e1ff7dc0ba0a7af4135758233f056ab992fca"}, - {file = "pyppmd-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1aeba466617cf975cd6719070ca9721bcd83a1a84bd8cf74c3a2808724481e"}, - {file = "pyppmd-1.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b0a87399ade5820f787758449976e758604c7739eb5f79ed9e594b5fa3a6a1bc"}, - {file = "pyppmd-1.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:74bd56b165283bb5586ff9ac7a896b217b3c94effe144b768279807840142bb4"}, - {file = "pyppmd-1.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ea4b1a326afe2055304740a03a233f7389f615179b9f6377264b306f619bfb11"}, - {file = "pyppmd-1.0.0-cp38-cp38-win32.whl", hash = "sha256:d2c3c16f644afb1b3caa4f6c717682030f7c3f54a12af8b1416b21877f0b5226"}, - {file = "pyppmd-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:a7f83970a057157c88d4a53a40431d07d8d3f38029ad2eae621422f955bd243b"}, - {file = "pyppmd-1.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:93d0d6ed97046ce25d64427ec493e06c23f32838972258bf11d603c9c998d6b3"}, - {file = "pyppmd-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1658714d012a5f9a8a3e67f3a9ede3519a2558064ccbd3163c39aca0cfd2412b"}, - {file = "pyppmd-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab4e29f774e064af09baf8478acd967684524e566b78fcc4f6f669757f0a2ab5"}, - {file = "pyppmd-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd227b8c292ac43d3297a91055fab51c27894dba39d04ccc774a72d9e6f85752"}, - {file = "pyppmd-1.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:68184b7246ea73a92a764e16cc18b74ccf3c8d6bfc438bbace57aeb1914118a7"}, - {file = "pyppmd-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aafe6fc436a782e6d424a0ac00de08a1559b6d6ddd08031adbe791ff4e54c90"}, - {file = "pyppmd-1.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bae08176e0d3ed0a5cbd838ff1ac557dfa088a652af633ab1905ab35bb9d7bc4"}, - {file = "pyppmd-1.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c60031de93834e5cd262db4b27272101d04a9a18c4cc49f81d483221211a97c8"}, - {file = "pyppmd-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aee9c52a6f232f3f7c683b87213aa3a9eacd281ab31187e784290ba1c05024fe"}, - {file = "pyppmd-1.0.0-cp39-cp39-win32.whl", hash = "sha256:2858471a291b51fab49242d78bd67c2b7719368618a02e4aa995de8c855da73c"}, - {file = "pyppmd-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:1ec00b07c6b68feb402d6596f3575a7892ad69e4f455deee7b5301df703e60dd"}, - {file = "pyppmd-1.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ac19ec1b6e3a0aadc1537466f537017189373593e23fe254df050fdd01f4a722"}, - {file = "pyppmd-1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10c8a41093952cde52b6d89488dc601ee7b10f6c95c430488f68987393777b46"}, - {file = "pyppmd-1.0.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea6a0d26db17027805a804d013cf761d732df5bce9d6b314cd1c727fe347277"}, - {file = "pyppmd-1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c31e5b331923f3b3b2cfbc66a60ecfd73db1a19a646bd1faf25bfde709a80d0"}, - {file = "pyppmd-1.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f488164e8876d213b0627a7a6cb798081eaf84fd9ec6dde5a1668296b15e1a6c"}, - {file = "pyppmd-1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6059ea0c9acc3b52b2961412ac75d1da72656f8b69bb8fc3d92eec6776176011"}, - {file = "pyppmd-1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadc63d0ac83f8c5744eb34ea47a70ff7bfab519b293482d7ccb09946c374dc7"}, - {file = "pyppmd-1.0.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09668aa43e4f02b8725e6233dfc66e532c72f0e69fa1b34dd814a9f7200e0496"}, - {file = "pyppmd-1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f79ebcd7312b541d3520e1a0d4c362731e24403e2f9f6761679b2ad819d5c706"}, - {file = "pyppmd-1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:556b6a3af3fca2b41ca25f51c481e5df8df4da842fc5a567da7bb099cfa52423"}, - {file = "pyppmd-1.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f9a3782f5accab4186d68c86defc61fcc7d0146e9cdc5b54e18656852c71db16"}, - {file = "pyppmd-1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c6c40f15b9fdea10bf966e5b07ee0a0ebcb8cf188ed9a466029c894816b303"}, - {file = "pyppmd-1.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a90b98f9d501eaedaca4d0e82f9e771bd2d780d71effcdeacc9fc6180a00e07"}, - {file = "pyppmd-1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a3b9192714b3e4773fc49c100ca13defa2502cb38e56205eb5a131ccf555d"}, - {file = "pyppmd-1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f1e7a1747518b5822eb755f3715d88bd1459e24de828aed86b7c1aa35e3ed76"}, - {file = "pyppmd-1.0.0.tar.gz", hash = "sha256:075c9bd297e3b0a87dd7aeabca7fee668218acbe69ecc1c6511064558de8840f"}, -] -pysocks = [ - {file = "PySocks-1.7.1-py27-none-any.whl", hash = 
"sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, - {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, - {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, -] -pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] -pytest-cov = [ - {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, - {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -python-dotenv = [ - {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, - {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, -] -pytz = [ - {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, - {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, -] -pyyaml = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, 
- {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -pyzstd = [ - {file = "pyzstd-0.15.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da9907e447d41650c00b8023be6789f7c2133eca3c6a0f72200ff25df29c0bf5"}, - {file = "pyzstd-0.15.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe24509154d3d7c16195fa2c0b546e160c947b09fd49ca08702abdcc5bc0c933"}, - {file = "pyzstd-0.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db93acdde600fd91ef329300de3d03d36d5dcb99533d69c85a0b58c27a0f4e53"}, - {file = "pyzstd-0.15.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:946eefc64d3a4d756920986dcc5043b7fdef179fa2f9dcb1a77dac2821abd934"}, - {file = "pyzstd-0.15.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a316c14ba55191a2eb32b04b7e1468fcac73e5fcd287b189650e1238dec9183"}, - {file = "pyzstd-0.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:082b08c1c0e8a9441e5f48c0f44fe4cee201c682f730c838ef86ed8619cda760"}, - {file = "pyzstd-0.15.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ef2ef67b2e54168d303f95f074f21e3e98e491f9d700ec2fa3266f6c0c71b5e"}, - {file = "pyzstd-0.15.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5bb9e6581db70e99973bb4267014e2dd48ed271d6ae3581fc92f73ad82f36dc1"}, - {file = "pyzstd-0.15.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:92894764d892d50a076607848fdbac4d011fcd35fc73eaedb93bab4442e502d7"}, - {file = "pyzstd-0.15.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08d1b45aed0df32b07ba1d1c49fe892d6fef4d4c97d1133d8836350a78a93244"}, - {file = "pyzstd-0.15.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:22e38237ace69bb6810734da1a3c2dd267fb5d7f68a35e175565bdc7ce6bb04a"}, - {file = "pyzstd-0.15.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fd98100fce2d6eb5ad3f7292c3498986e37397d50c6e3b04adc3c91c3d26bd9d"}, - {file = "pyzstd-0.15.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:49cd5bb0220f16bf41b80a0301f63fd4f5713cdc1795919891f8904f82f31768"}, - {file = "pyzstd-0.15.3-cp310-cp310-win32.whl", hash = "sha256:8218f953895e6d43a789b9a53a4945531d1ad4e76b64ac2d88e0a8980cc1d9e4"}, - {file = "pyzstd-0.15.3-cp310-cp310-win_amd64.whl", hash = "sha256:4af74993d8eb032105b7640c4e4af161f8093447a62de3f2c9f14493576a95e7"}, - {file = "pyzstd-0.15.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7841e3bcfdf18818bb3408b151351a51440895525d62a55f157586a55032e40"}, - {file = "pyzstd-0.15.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a381a0d1f3e3fdb93460ae93c5c1057d894d436c5d97b2c2bf8f015d28632afe"}, - {file = "pyzstd-0.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b087a31541d81d0fc5769f95dda8c5a45e50d9c67098c643d5cd55d565b6441b"}, - {file = "pyzstd-0.15.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a91a8d60a3a42e920d3ed02b327010c670ce83c6c79206657af3f6a61dde418c"}, - {file = "pyzstd-0.15.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e11bdfe4478449244eb2d677c8881a80ae94ae08cded8051e82c73d6725fde17"}, - {file = "pyzstd-0.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cad4e6b6760de73f952312c0770069876358122a7e8e296183d833cbf465c5"}, - {file = "pyzstd-0.15.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1631cc546c30da82b4bfb07bfc53aa46ce765800c4c839aabdd9df0f49c6bf6"}, - {file = "pyzstd-0.15.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:35f1aa7c87b613a09fba9b6cfc1b6fbeddeee8fd1b3ba25facabdb53fa1b17fe"}, - {file = "pyzstd-0.15.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:49b27434e7d247a8326713f4a30d8d2447120e5f8b523400df1b5274b6a721e6"}, - {file = "pyzstd-0.15.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:9253059f2ae2721405e9f45b34f907ab29f6e671e2bfda1593c3114a46673bed"}, - {file = "pyzstd-0.15.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5367743837ca7f46fbbdbb0faafc3e99b22bb6132fe78cf40892b8ba10367b5b"}, - {file = "pyzstd-0.15.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d7af343043d27330caa915a942fc6579b7360d5fce7a1065476c846f9988a0f"}, - {file = "pyzstd-0.15.3-cp311-cp311-win32.whl", hash = "sha256:df7c22c019249031da18ca350e087c8357576cfaf2970be6cc6e5b9604a4255f"}, - {file = "pyzstd-0.15.3-cp311-cp311-win_amd64.whl", hash = "sha256:b543ae7a2449caa96fe4427fb83e0b004a9f4ca9fd943edf8296a73dfa7c0a69"}, - {file = "pyzstd-0.15.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b68c2f51a8c6dd9bc66e7bba8e59e99c2a91112ec75c18e53a880b2dbc6e8e68"}, - {file = "pyzstd-0.15.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b0576ce2165a3a95b222e6514013105218d56b81857a1b694514eb63fbbdc5a"}, - {file = "pyzstd-0.15.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6fe2fad80743e60f969b02f9ab056f8a005974d5c34f1a9b3eca1df8f56b756"}, - {file = "pyzstd-0.15.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ae0e5532bb41e830c257dec2abfe94bf8ab09afebe7ecf710d6d3cfa35d6aea"}, - {file = "pyzstd-0.15.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5eee632a69c8c0ab78215cae44f9944d906771622564f2a90fd7374739eeb5"}, - {file = "pyzstd-0.15.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dadd0cbeda15cf89abcd814b5478f1f17e22444113d35428cd62d0b651a35a19"}, - {file = "pyzstd-0.15.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:36754d70db264943ad9cb21a5130d3bce9d62ac98a645a2d90adfb9cd548eb21"}, - {file = "pyzstd-0.15.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:761bcbf2aa04fcf33fee3e114d832b0858dfefb6824ce585757e7b793a0b2deb"}, - {file = "pyzstd-0.15.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:3e7fb44fd42abf6db9850f27662763c52f2f282851c7a5af790655cf593016ce"}, - {file = "pyzstd-0.15.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:b81786383dcc62e1bc40055e14f268a6bea5818b63efbfb4514083e91f3ba111"}, - {file = "pyzstd-0.15.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = 
"sha256:3e073dbbb16c2c815299037134c697f044bae142ca02142a10cb72de874580ea"}, - {file = "pyzstd-0.15.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:04cbf37ac09ca272143dff90d54f4856869bdd3b40eb262f625e4cc785efdd3b"}, - {file = "pyzstd-0.15.3-cp36-cp36m-win32.whl", hash = "sha256:4d7e876fea4ded82233c2fc2df4c56b00433db351bb21f401507e7dea7c16819"}, - {file = "pyzstd-0.15.3-cp36-cp36m-win_amd64.whl", hash = "sha256:4fb7d0267a025509b22c1eaa4110563aa60ca27ca4cab24e3aac7a8770ce944b"}, - {file = "pyzstd-0.15.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:287b3f15d1f473674b3689fda5c7143b396d5dd53360b450560823485dbfdd8e"}, - {file = "pyzstd-0.15.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b840c7056759da13f8634856945f2b6855335b7e759ee27003f4c42c57676d8"}, - {file = "pyzstd-0.15.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1310deeb8af68a0c1b32b069776b4352c7e5f2d8ac60f79955606c49a9852bc"}, - {file = "pyzstd-0.15.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30e58666f97cbfea43e12d93b482a86d1e79771609dbb8f095d30a0cbb69d0d6"}, - {file = "pyzstd-0.15.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66f9de313d1ba7746d61698c14573f597b5a9d562041828139a3eecd62efa240"}, - {file = "pyzstd-0.15.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0c84b349b4dbabbc1e004b80b1cfcb8dc78442c10a81636dfa9ee94c028ed9b"}, - {file = "pyzstd-0.15.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e8e11d7503f48a6d46d5962c953f17f12b7e001af9c64d58d3ab195981066cc"}, - {file = "pyzstd-0.15.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d7204054567bd41e57f10f9134c4210edbb9eab9cea55e9081dd388461b2c794"}, - {file = "pyzstd-0.15.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:eb27499fab77ce5838d42067a964b454b5784913e6fa0e1e6841e3b183c11154"}, - {file = "pyzstd-0.15.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23903d2da8b650358ce67c4a2125e8d1d9a7c9ebf959011832dcb2779f7fb51c"}, - {file = "pyzstd-0.15.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d41a41e81f66002eed7e0df49ee2893b41068c1866612f59fe2751823a1c650c"}, - {file = "pyzstd-0.15.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:86f56db8a082da130d1ca67e9181bcf42deab75527b3f2a35e5e6144f3f0691a"}, - {file = "pyzstd-0.15.3-cp37-cp37m-win32.whl", hash = "sha256:a10ef9ab262f117a379158cd2ff262caf48ec4e35f54554a971bfa698a33a530"}, - {file = "pyzstd-0.15.3-cp37-cp37m-win_amd64.whl", hash = "sha256:48e81e5e4f315164790163ff503b0dce7b4ad519cc954215033c683d0fd0f9cd"}, - {file = "pyzstd-0.15.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7fc780a067b3754b913268481aa0bd9d80cac1d2a9c1e1c7abb7102ab4726903"}, - {file = "pyzstd-0.15.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:db69b7bf80935d3c3da0dff4000e8a94f0224f98c312914190af79932ae421d5"}, - {file = "pyzstd-0.15.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed0826eab6ab133b8f8bb0369d76e546dad70a94b372b6d6351ba8320ec33615"}, - {file = "pyzstd-0.15.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0faf892a3f20258da72fd83ad0b394e8ebcbe3a637735870528529f3aef3c676"}, - {file = "pyzstd-0.15.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e4694db47c8816c499d7d4240abcec5154a227f454a30041de5632faef11a41"}, - {file = 
"pyzstd-0.15.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:539f0157f2283e395a503022aab915a9f1577fd97d92ed27b85adceeaea3d24c"}, - {file = "pyzstd-0.15.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e887757277409fd02535d24dc0bfc48aa3b8c1990b0451dcb5157776c64cf0d3"}, - {file = "pyzstd-0.15.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:650fce9212410fdc82f1cb32d562e89f6dd1480d1cdbc0769e09235e236317c2"}, - {file = "pyzstd-0.15.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17de64690976caf4a355da8a9b06d6ad55b424899e7cf305c6b08b96c8b764f4"}, - {file = "pyzstd-0.15.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8cd3792aa54a6c4933d2b63e90252220d8f8347e424f39c5eaec10f3bc415f10"}, - {file = "pyzstd-0.15.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4e92706f6f579d78768f942cde4359195fc2750e58c4bf3c1c91929693e10fd0"}, - {file = "pyzstd-0.15.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4fa699945cbd1316657550c00e1fa998c1ab6df5e0aff60254b0eb768be38003"}, - {file = "pyzstd-0.15.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c3604d118493f2c09d387f9e569c2ebc71f07be148f57397bd485773945f192f"}, - {file = "pyzstd-0.15.3-cp38-cp38-win32.whl", hash = "sha256:11bcf59b869abc10cf7cd872bd3d113642c94e92a5b68fe990154945096f8c4e"}, - {file = "pyzstd-0.15.3-cp38-cp38-win_amd64.whl", hash = "sha256:3ad35fd4de3591d8c538fe1fc4192a5cfc8715727dd9d7bedf6aceae67ff3408"}, - {file = "pyzstd-0.15.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:041bc30b7c47e52ee418786fa806fbe42094f990353d3e685a9c96ed6a4d2212"}, - {file = "pyzstd-0.15.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a08887c9ea43f5b08f2c33dd92ffc8a26afb9d9e23e8cebc962bbba134719f3c"}, - {file = "pyzstd-0.15.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e33e9367f32c2422bbf1a33a4e965e5e2d076eb4042f97971b6acd19c0a16ae6"}, - {file = "pyzstd-0.15.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0134944e00345f89657716aca9a1d2280bef69aca7a0cd326dd10d33f3caae78"}, - {file = "pyzstd-0.15.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67a9f383ee8054a72e7389ba51b131cd5acf26c3c8137e45a460d30d350da3ac"}, - {file = "pyzstd-0.15.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66ffba0844b84b742764455244e582f24a172390d8e1f479900fa549b2acc96a"}, - {file = "pyzstd-0.15.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdedc13c0d67aaaa706310eb41248fb78e7bd3fdf335d2de8fdeb2d71574645"}, - {file = "pyzstd-0.15.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9ddc5a9ef8b09241fff58bbcb780bffaf85437d29ce516f2ac522c3c6d9f5fee"}, - {file = "pyzstd-0.15.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2b368e6f601824401d3f5e5f78319bb09b0d6d1c0d23175f71b82739de9d2218"}, - {file = "pyzstd-0.15.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1eb94243194db49c8d1d7ffdc51982d88459cb74b4ac5a6ecd64313a93927cf3"}, - {file = "pyzstd-0.15.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:9d1688fc5cd6c32cf648e6e86162b5f2a9bddfc317deb19893c0d53fa15145f4"}, - {file = "pyzstd-0.15.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0a4e098058d8262f33ab550eed3824bb9f044a62120c17f0bf886529b32bf1cc"}, - {file = "pyzstd-0.15.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash 
= "sha256:089e70d146d3d37cd1c8adbdb8700311752a2d3ad42323037c5fb4032a00f7f7"}, - {file = "pyzstd-0.15.3-cp39-cp39-win32.whl", hash = "sha256:6e486d38fd247fdecde5bafe4af47a6e583c46c0a0c34c098e4d8a291603a2f8"}, - {file = "pyzstd-0.15.3-cp39-cp39-win_amd64.whl", hash = "sha256:728be92bc42bdccfecef88fc93a56e6ea561919fe9e00a8ddccde644dc1ecc53"}, - {file = "pyzstd-0.15.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3cfe3961fef371f616255d36c5629b421ea1adf6eed341cc64223e84d544429f"}, - {file = "pyzstd-0.15.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106928b5926ead3cee7a121d50568ffaac89966e31a061f3faa2ec2c9dad8904"}, - {file = "pyzstd-0.15.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f8378d0f0726d12bf01f52e2448725236b98b2e629e4b1183433274213eb576"}, - {file = "pyzstd-0.15.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4695713ced2d1b3e34ffbe644c9bd855e5eceb85d6bff6b113302a2878951e2b"}, - {file = "pyzstd-0.15.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6fb1866aead975ff17c8094819f361b015704a0fb01468b65b5a82d2686b75d1"}, - {file = "pyzstd-0.15.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:992823978523ee3107cc75ea5ed49907212e04dd4beb0f2e5b22587c8ed9e395"}, - {file = "pyzstd-0.15.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:10edcb025eade0b92ecc3d801094b0511b5484c78cf43ac9b68be7d27710ba77"}, - {file = "pyzstd-0.15.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd4c60832749e34b23bd2e652d233c0283cff71a68f54f015f12bd682b780c4"}, - {file = "pyzstd-0.15.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb599d2a4275f5708216fd701756b956233f4cccd576bc3e10f7114e69779c2"}, - {file = "pyzstd-0.15.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71ad945b118ef231b0e7475ed998c9b4b62af8964e73510b66a2a71fbd977109"}, - {file = "pyzstd-0.15.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d4f972628947451154285a460aad40626301b269b949f205467a1947003583f6"}, - {file = "pyzstd-0.15.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ba2775501b126a3edec424a29d2afabdd6e65b36991c404ec29cbde713b1cfb0"}, - {file = "pyzstd-0.15.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d265e6c92fda25059452c604fa407c35d3a6ae51416b874c37f7c7bbccc4c1c7"}, - {file = "pyzstd-0.15.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef8e9886c96d59d9357a30b06862fd29887c9de8652454de4cc5d021d706ff9"}, - {file = "pyzstd-0.15.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73dfdd5b9ceea88b53ae2896054bc6b1e6e7e5d4c04b9a4a8c800d85a6b62056"}, - {file = "pyzstd-0.15.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33e94fbccea132044ffbd3523a376c1de5afb521ecfd54f44de4ac8d3681dddb"}, - {file = "pyzstd-0.15.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c0fbef0dbe12a34b8c1eb980c18ea39c432565a77922bc692eeb666fa77fe97b"}, - {file = "pyzstd-0.15.3.tar.gz", hash = "sha256:ac4edab5d3955343e8f7f287e62cd2882907d46bcba4b406a1e9f84aa2887472"}, -] -rarfile = [ - {file = "rarfile-4.0-py3-none-any.whl", hash = 
"sha256:1094869119012f95c31a6f22cc3a9edbdca61861b805241116adbe2d737b68f8"}, - {file = "rarfile-4.0.tar.gz", hash = "sha256:67548769229c5bda0827c1663dce3f54644f9dbfba4ae86d4da2b2afd3e602a1"}, -] -regex = [ - {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, - {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"}, - {file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"}, - {file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"}, - {file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"}, - {file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"}, - {file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"}, - {file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"}, - {file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"}, - {file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"}, - {file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = 
"sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"}, - {file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"}, - {file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = "sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"}, - {file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"}, - {file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"}, - {file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"}, - {file = 
"regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"}, - {file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"}, - {file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"}, - {file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"}, - {file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"}, - {file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = "sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"}, - {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = 
"sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"}, - {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -requests-oauthlib = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, -] -resampy = [ - {file = "resampy-0.4.2-py3-none-any.whl", hash = "sha256:4340b6c4e685a865621dfcf016e2a3dd49d865446b6025e30fe88567f22e052e"}, - {file = "resampy-0.4.2.tar.gz", hash = "sha256:0a469e6ddb89956f4fd6c88728300e4bbd186fae569dd4fd17dae51a91cbaa15"}, -] -resolvelib = [ - {file = "resolvelib-0.9.0-py2.py3-none-any.whl", hash = "sha256:597adcbdf81d62d0cde55d90faa8e79187ec0f18e5012df30bd7a751b26343ae"}, - {file = "resolvelib-0.9.0.tar.gz", hash = "sha256:40ab05117c3281b1b160105e10075094c5ab118315003c922b77673a365290e1"}, -] -responses = [ - {file = "responses-0.18.0-py3-none-any.whl", hash = "sha256:15c63ad16de13ee8e7182d99c9334f64fd81f1ee79f90748d527c28f7ca9dd51"}, - {file = "responses-0.18.0.tar.gz", hash = "sha256:380cad4c1c1dc942e5e8a8eaae0b4d4edf708f4f010db8b7bcfafad1fcd254ff"}, -] -rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, -] -rsa = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] -scikit-learn = [ - {file = "scikit-learn-1.1.3.tar.gz", hash = "sha256:bef51978a51ec19977700fe7b86aecea49c825884f3811756b74a3b152bb4e35"}, - {file = "scikit_learn-1.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8e9dd76c7274055d1acf4526b8efb16a3531c26dcda714a0c16da99bf9d41900"}, - {file = "scikit_learn-1.1.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ee47f68d973cee7009f06edb956f2f5588a0f230f24a2a70175fd0ecf36e2653"}, - {file = "scikit_learn-1.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da5a2e95fef9805b1750e4abda4e834bf8835d26fc709a391543b53feee7bd0e"}, - {file = "scikit_learn-1.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:701181792a28c82fecae12adb5d15d0ecf57bffab7cf4bdbb52c7b3fd428d540"}, - {file = "scikit_learn-1.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:30e27721adc308e8fd9f419f43068e43490005f911edf4476a9e585059fa8a83"}, - {file = "scikit_learn-1.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5699cded6c0685426433c7e5afe0fecad80ec831ec7fa264940e50c796775cc5"}, - {file = "scikit_learn-1.1.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:2ee2c649f2231b68511aabb0dc827edd8936aad682acc6263c34aed11bc95dac"}, - {file = "scikit_learn-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d1c1394e38a3319ace620381f6f23cc807d8780e9915c152449a86fc8f1db21"}, - {file = "scikit_learn-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:250da993701da88bf475e7c5746abf1285ea0ae47e4d0917cd13afd6600bb162"}, - {file = "scikit_learn-1.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:fd3ee69d36d42a7dcbb17e355a5653af5fd241a7dfd9133080b3dde8d9e2aafb"}, - {file = "scikit_learn-1.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f5644663987ee221f5d1f47a593271b966c271c236fe05634e6bdc06041b5a2b"}, - {file = "scikit_learn-1.1.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:748f2bd632d6993e8918d43f1a26c380aeda4e122a88840d4c3a9af99d4239fe"}, - {file = "scikit_learn-1.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd55c6fbef7608dbce1f22baf289dfcc6eb323247daa3c3542f73d389c724786"}, - {file = "scikit_learn-1.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38814f66285318f2e241305cca545eaa9b4126c65aa5dd78c69371f235f78e2b"}, - {file = "scikit_learn-1.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:f4931f2a6c06e02c6c17a05f8ae397e2545965bc7a0a6cb38c8cd7d4fba8624d"}, - {file = "scikit_learn-1.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6785b8a3093329bf90ac01801be5525551728ae73edb11baa175df660820add4"}, - {file = "scikit_learn-1.1.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:28b2bd6a1419acd522ff45d282c8ba23dbccb5338802ab0ee12baa4ade0aba4c"}, - {file = "scikit_learn-1.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23fb9e74b813cc2528b5167d82ed08950b11106ccf50297161875e45152fb311"}, - {file = "scikit_learn-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5d4231af7199531e77da1b78a4cc6b3d960a00b1ec672578ac818aae2b9c35d"}, - {file = "scikit_learn-1.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:4d3a19166d4e1cdfcab975c68f471e046ce01e74c42a9a33fa89a14c2fcedf60"}, -] -scipy = [ - {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"}, - {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"}, - {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a72d885fa44247f92743fc20732ae55564ff2a519e8302fb7e18717c5355a8b"}, - {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01e1dd7b15bd2449c8bfc6b7cc67d630700ed655654f0dfcf121600bad205c9"}, - {file = "scipy-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:68239b6aa6f9c593da8be1509a05cb7f9efe98b80f43a5861cd24c7557e98523"}, - {file = "scipy-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b41bc822679ad1c9a5f023bc93f6d0543129ca0f37c1ce294dd9d386f0a21096"}, - {file = "scipy-1.9.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:90453d2b93ea82a9f434e4e1cba043e779ff67b92f7a0e85d05d286a3625df3c"}, - {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c06e62a390a9167da60bedd4575a14c1f58ca9dfde59830fc42e5197283dab"}, - {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abaf921531b5aeaafced90157db505e10345e45038c39e5d9b6c7922d68085cb"}, - {file = "scipy-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:06d2e1b4c491dc7d8eacea139a1b0b295f74e1a1a0f704c375028f8320d16e31"}, - {file = "scipy-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a04cd7d0d3eff6ea4719371cbc44df31411862b9646db617c99718ff68d4840"}, - {file = "scipy-1.9.3-cp38-cp38-macosx_12_0_arm64.whl", hash = 
"sha256:545c83ffb518094d8c9d83cce216c0c32f8c04aaf28b92cc8283eda0685162d5"}, - {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d54222d7a3ba6022fdf5773931b5d7c56efe41ede7f7128c7b1637700409108"}, - {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff3a5295234037e39500d35316a4c5794739433528310e117b8a9a0c76d20fc"}, - {file = "scipy-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:2318bef588acc7a574f5bfdff9c172d0b1bf2c8143d9582e05f878e580a3781e"}, - {file = "scipy-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d644a64e174c16cb4b2e41dfea6af722053e83d066da7343f333a54dae9bc31c"}, - {file = "scipy-1.9.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:da8245491d73ed0a994ed9c2e380fd058ce2fa8a18da204681f2fe1f57f98f95"}, - {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4db5b30849606a95dcf519763dd3ab6fe9bd91df49eba517359e450a7d80ce2e"}, - {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c68db6b290cbd4049012990d7fe71a2abd9ffbe82c0056ebe0f01df8be5436b0"}, - {file = "scipy-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:5b88e6d91ad9d59478fafe92a7c757d00c59e3bdc3331be8ada76a4f8d683f58"}, - {file = "scipy-1.9.3.tar.gz", hash = "sha256:fbc5c05c85c1a02be77b1ff591087c83bc44579c6d2bd9fb798bb64ea5e1a027"}, -] -setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sklearn = [ - {file = "sklearn-0.0.post1.tar.gz", hash = "sha256:76b9ed1623775168657b86b5fe966d45752e5c87f528de6240c38923b94147c5"}, -] -smmap = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] -sortedcontainers = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] -soundfile = [ - {file = "soundfile-0.11.0-py2.py3-none-any.whl", hash = "sha256:f4e4f832b1958403fb9726eeea54e0ebf1c7fc2599ff296a7ab1ac062f8048c9"}, - {file = "soundfile-0.11.0-py2.py3-none-macosx_10_9_arm64.macosx_11_0_arm64.whl", hash = "sha256:9e6a62eefad0a7f856cc8f5ede7f1a0c196b65d2901c00fffc74a3d7e81d89c8"}, - {file = "soundfile-0.11.0-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_x86_64.whl", hash = "sha256:12f66fe9dcddedaa6c808bc3e104fc67fcee59dc64214bf7f43605e69836c497"}, - {file = "soundfile-0.11.0-py2.py3-none-win32.whl", hash = "sha256:08d9636815692f332e042990d449e79b888d288f0752226d8602e91523a0a29b"}, - {file = "soundfile-0.11.0-py2.py3-none-win_amd64.whl", hash = "sha256:a4ab6f66ad222d8e144dcb6abc73fbb867c11da2934b677f9b129778a6c65112"}, - {file = "soundfile-0.11.0.tar.gz", hash = "sha256:931738a1c93e8684c2d3e1d514ac63440ce827ec783ea0a2d3e4730e3dc58c18"}, -] -soupsieve = [ - {file = 
"soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] -stevedore = [ - {file = "stevedore-4.1.1-py3-none-any.whl", hash = "sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"}, - {file = "stevedore-4.1.1.tar.gz", hash = "sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a"}, -] -tensorboard = [ - {file = "tensorboard-2.10.1-py3-none-any.whl", hash = "sha256:fb9222c1750e2fa35ef170d998a1e229f626eeced3004494a8849c88c15d8c1c"}, -] -tensorboard-data-server = [ - {file = "tensorboard_data_server-0.6.1-py3-none-any.whl", hash = "sha256:809fe9887682d35c1f7d1f54f0f40f98bb1f771b14265b453ca051e2ce58fca7"}, - {file = "tensorboard_data_server-0.6.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:fa8cef9be4fcae2f2363c88176638baf2da19c5ec90addb49b1cde05c95c88ee"}, - {file = "tensorboard_data_server-0.6.1-py3-none-manylinux2010_x86_64.whl", hash = "sha256:d8237580755e58eff68d1f3abefb5b1e39ae5c8b127cc40920f9c4fb33f4b98a"}, -] -tensorboard-plugin-wit = [ - {file = "tensorboard_plugin_wit-1.8.1-py3-none-any.whl", hash = "sha256:ff26bdd583d155aa951ee3b152b3d0cffae8005dc697f72b44a8e8c2a77a8cbe"}, -] -tensorflow = [ - {file = "tensorflow-2.10.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:dc3587dfa714be711d2681d5e2fb59037b18e83e692f084db49bce31b6268d15"}, - {file = "tensorflow-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3cab933757eb0c204dc4cf34d031939e33cae8f97a7aaef00a12678129b17f"}, - {file = "tensorflow-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f1d579b849afaea7b10f7693dc43b1d07321d279a016f01e2ddfe971d0d8af"}, - {file = "tensorflow-2.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a6049664f9a0d14b0a4a7e6f058be87b2d8c27be826d7dd9a870ff03683fbc0b"}, - {file = "tensorflow-2.10.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ae77b9fcf826cdb05e8c3c6cfcd0ce10b9adcf2ffe952e159cf6ef182f0f3682"}, - {file = "tensorflow-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a8f6f1344cab3ef7e6c794b3e252bbedc764c198be645a5b396c3b67b8bc093"}, - {file = "tensorflow-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:886180162db50ac7c5f8e2affbeae32588c97d08e49089135c71052392913dca"}, - {file = "tensorflow-2.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:981b08964e132de71a37b98b6d5ec4204aa51bc9529ecc7fefcd01c33d7e7d53"}, - {file = "tensorflow-2.10.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f1c11fad08aa24f4838caf0aa1fba694bfaa323168d3e42e58387f5239943b56"}, - {file = "tensorflow-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7603cef40bee34cebdfbf264f9ce14c25529356f581f6fb5605f567efd92e07"}, - {file = "tensorflow-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee057aa57957b1a689c181bd406c30cbe152b7893c484fe6a26fcce6750f665"}, - {file = "tensorflow-2.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:5ef5e562e6baa9dc9f58db324668e7991caec546dfd5ed50647c734cd0d2daab"}, - {file = "tensorflow-2.10.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:18895381a123de287f94b1f76ceb56e86227a13e414a2928ab470d7c5b6b4c52"}, - {file = "tensorflow-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6d07439c32b579b4c0251b494002e85954b37447286f2e65554f3ad940e496ff"}, - {file = "tensorflow-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab2d33039fc8b340feb3d1f56db2c3d4bb25f059089a42dbe067b879add61815"}, - {file = "tensorflow-2.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:153111af1d773033264f8591f5deffece180a1f16935b579f43edd83acb17584"}, -] -tensorflow-estimator = [ - {file = "tensorflow_estimator-2.10.0-py2.py3-none-any.whl", hash = "sha256:f324ea17cd57f16e33bf188711d5077e6b2e5f5a12c328d6e01a07b23888edcd"}, -] -tensorflow-io-gcs-filesystem = [ - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:22753dc28c949bfaf29b573ee376370762c88d80330fe95cfb291261eb5e927a"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:52988659f405166df79905e9859bc84ae2a71e3ff61522ba32a95e4dce8e66d2"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:698d7f89e09812b9afeb47c3860797343a22f997c64ab9dab98132c61daa8a7d"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:bbf245883aa52ec687b66d0fcbe0f5f0a92d98c0b1c53e6a736039a3548d29a1"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6d95f306ff225c5053fd06deeab3e3a2716357923cb40c44d566c11be779caa3"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:5fbef5836e70026245d8d9e692c44dae2c6dbc208c743d01f5b7a2978d6b6bc6"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:00cf6a92f1f9f90b2ba2d728870bcd2a70b116316d0817ab0b91dd390c25b3fd"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f76cbe1a784841c223f6861e5f6c7e53aa6232cb626d57e76881a0638c365de6"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c5d99f56c12a349905ff684142e4d2df06ae68ecf50c4aad5449a5f81731d858"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:b6e2d275020fb4d1a952cd3fa546483f4e46ad91d64e90d3458e5ca3d12f6477"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a6670e0da16c884267e896ea5c3334d6fd319bd6ff7cf917043a9f3b2babb1b3"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:bfed720fc691d3f45802a7bed420716805aef0939c11cebf25798906201f626e"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:cc062ce13ec95fb64b1fd426818a6d2b0e5be9692bc0e43a19cce115b6da4336"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:366e1eff8dbd6b64333d7061e2a8efd081ae4742614f717ced08d8cc9379eb50"}, - {file = "tensorflow_io_gcs_filesystem-0.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:9484893779324b2d34874b0aacf3b824eb4f22d782e75df029cbccab2e607974"}, -] -tensorflow-macos = [ - {file = "tensorflow_macos-2.10.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:dfd1dd478b3ae01e8d578c38083bef68bc838ceaa05a813b6788fe9e6ec19140"}, - {file = "tensorflow_macos-2.10.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:aa074b5442d3411e5416c5112531d8b78a8c469ca92fa41c0e0cf14428608bf3"}, - {file = 
"tensorflow_macos-2.10.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:e15ab56f39f3d2e7c07a72a5969f025e259b1d3fcb9c3f7217f17b62581c33a8"}, - {file = "tensorflow_macos-2.10.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:00be07ffcf8b6288fa3dd9a27a3b6dd0f6c85af5c3109451a1b7e720bb817d14"}, - {file = "tensorflow_macos-2.10.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f2ec603c5496c25fb1bcda8eb4166423bf023bfb7ae6cbdec0be8796ca67e866"}, - {file = "tensorflow_macos-2.10.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:3177a8a97149f78748eeb20009aa3dc27ae2c112ab6380886ec8620bbcb70f19"}, -] -termcolor = [ - {file = "termcolor-2.1.1-py3-none-any.whl", hash = "sha256:fa852e957f97252205e105dd55bbc23b419a70fec0085708fc0515e399f304fd"}, - {file = "termcolor-2.1.1.tar.gz", hash = "sha256:67cee2009adc6449c650f6bcf3bdeed00c8ba53a8cda5362733c53e0a39fb70b"}, -] -texttable = [ - {file = "texttable-1.6.7-py2.py3-none-any.whl", hash = "sha256:b7b68139aa8a6339d2c320ca8b1dc42d13a7831a346b446cb9eb385f0c76310c"}, - {file = "texttable-1.6.7.tar.gz", hash = "sha256:290348fb67f7746931bcdfd55ac7584ecd4e5b0846ab164333f0794b121760f2"}, -] -tfrecord = [ - {file = "tfrecord-1.14.1.tar.gz", hash = "sha256:0670dc3ec1de27d034506b9b7ba6f650ba8f7ca5f536c9c742c602ba6c0ffad3"}, -] -threadpoolctl = [ - {file = "threadpoolctl-3.1.0-py3-none-any.whl", hash = "sha256:8b99adda265feb6773280df41eece7b2e6561b772d21ffd52e372f999024907b"}, - {file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"}, -] -tokenizers = [ - {file = "tokenizers-0.13.2-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:a6f36b1b499233bb4443b5e57e20630c5e02fba61109632f5e00dab970440157"}, - {file = "tokenizers-0.13.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:bc6983282ee74d638a4b6d149e5dadd8bc7ff1d0d6de663d69f099e0c6bddbeb"}, - {file = "tokenizers-0.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16756e6ab264b162f99c0c0a8d3d521328f428b33374c5ee161c0ebec42bf3c0"}, - {file = "tokenizers-0.13.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b10db6e4b036c78212c6763cb56411566edcf2668c910baa1939afd50095ce48"}, - {file = "tokenizers-0.13.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:238e879d1a0f4fddc2ce5b2d00f219125df08f8532e5f1f2ba9ad42f02b7da59"}, - {file = "tokenizers-0.13.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47ef745dbf9f49281e900e9e72915356d69de3a4e4d8a475bda26bfdb5047736"}, - {file = "tokenizers-0.13.2-cp310-cp310-win32.whl", hash = "sha256:96cedf83864bcc15a3ffd088a6f81a8a8f55b8b188eabd7a7f2a4469477036df"}, - {file = "tokenizers-0.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:eda77de40a0262690c666134baf19ec5c4f5b8bde213055911d9f5a718c506e1"}, - {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a689654fc745135cce4eea3b15e29c372c3e0b01717c6978b563de5c38af9811"}, - {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3606528c07cda0566cff6cbfbda2b167f923661be595feac95701ffcdcbdbb21"}, - {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41291d0160946084cbd53c8ec3d029df3dc2af2673d46b25ff1a7f31a9d55d51"}, - {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7892325f9ca1cc5fca0333d5bfd96a19044ce9b092ce2df625652109a3de16b8"}, - {file = 
"tokenizers-0.13.2-cp311-cp311-win32.whl", hash = "sha256:93714958d4ebe5362d3de7a6bd73dc86c36b5af5941ebef6c325ac900fa58865"}, - {file = "tokenizers-0.13.2-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:da521bfa94df6a08a6254bb8214ea04854bb9044d61063ae2529361688b5440a"}, - {file = "tokenizers-0.13.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a739d4d973d422e1073989769723f3b6ad8b11e59e635a63de99aea4b2208188"}, - {file = "tokenizers-0.13.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cac01fc0b868e4d0a3aa7c5c53396da0a0a63136e81475d32fcf5c348fcb2866"}, - {file = "tokenizers-0.13.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0901a5c6538d2d2dc752c6b4bde7dab170fddce559ec75662cfad03b3187c8f6"}, - {file = "tokenizers-0.13.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ba9baa76b5a3eefa78b6cc351315a216232fd727ee5e3ce0f7c6885d9fb531b"}, - {file = "tokenizers-0.13.2-cp37-cp37m-win32.whl", hash = "sha256:a537061ee18ba104b7f3daa735060c39db3a22c8a9595845c55b6c01d36c5e87"}, - {file = "tokenizers-0.13.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c82fb87b1cbfa984d8f05b2b3c3c73e428b216c1d4f0e286d0a3b27f521b32eb"}, - {file = "tokenizers-0.13.2-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:ce298605a833ac7f81b8062d3102a42dcd9fa890493e8f756112c346339fe5c5"}, - {file = "tokenizers-0.13.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51b93932daba12ed07060935978a6779593a59709deab04a0d10e6fd5c29e60"}, - {file = "tokenizers-0.13.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6969e5ea7ccb909ce7d6d4dfd009115dc72799b0362a2ea353267168667408c4"}, - {file = "tokenizers-0.13.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92f040c4d938ea64683526b45dfc81c580e3b35aaebe847e7eec374961231734"}, - {file = "tokenizers-0.13.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3bc9f7d7f4c1aa84bb6b8d642a60272c8a2c987669e9bb0ac26daf0c6a9fc8"}, - {file = "tokenizers-0.13.2-cp38-cp38-win32.whl", hash = "sha256:efbf189fb9cf29bd29e98c0437bdb9809f9de686a1e6c10e0b954410e9ca2142"}, - {file = "tokenizers-0.13.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b4cb2c60c094f31ea652f6cf9f349aae815f9243b860610c29a69ed0d7a88f8"}, - {file = "tokenizers-0.13.2-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:b47d6212e7dd05784d7330b3b1e5a170809fa30e2b333ca5c93fba1463dec2b7"}, - {file = "tokenizers-0.13.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:80a57501b61ec4f94fb7ce109e2b4a1a090352618efde87253b4ede6d458b605"}, - {file = "tokenizers-0.13.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61507a9953f6e7dc3c972cbc57ba94c80c8f7f686fbc0876afe70ea2b8cc8b04"}, - {file = "tokenizers-0.13.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c09f4fa620e879debdd1ec299bb81e3c961fd8f64f0e460e64df0818d29d845c"}, - {file = "tokenizers-0.13.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:66c892d85385b202893ac6bc47b13390909e205280e5df89a41086cfec76fedb"}, - {file = "tokenizers-0.13.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e306b0941ad35087ae7083919a5c410a6b672be0343609d79a1171a364ce79"}, - {file = "tokenizers-0.13.2-cp39-cp39-win32.whl", hash = "sha256:79189e7f706c74dbc6b753450757af172240916d6a12ed4526af5cc6d3ceca26"}, - {file = "tokenizers-0.13.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:486d637b413fddada845a10a45c74825d73d3725da42ccd8796ccd7a1c07a024"}, - {file = "tokenizers-0.13.2.tar.gz", hash = "sha256:f9525375582fd1912ac3caa2f727d36c86ff8c0c6de45ae1aaff90f87f33b907"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -tomlkit = [ - {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"}, - {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"}, -] -torch = [ - {file = "torch-1.10.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:8f3fd2e3ffc3bb867133fdf7fbcc8a0bb2e62a5c0696396f51856f5abf9045a8"}, - {file = "torch-1.10.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:258a0729fb77a3457d5822d84b536057cd119b08049a8d3c41dc3dcdeb48d56e"}, - {file = "torch-1.10.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:935e5ac804c5093c79f23a7e6ca5b912c166071aa9d8b4a0a3d6a85126d6a47b"}, - {file = "torch-1.10.2-cp36-cp36m-win_amd64.whl", hash = "sha256:65fd02ed889c63fd82bf1a440c5a94c1310c29f3e6f9f62add416d34da355d97"}, - {file = "torch-1.10.2-cp36-none-macosx_10_9_x86_64.whl", hash = "sha256:6a81f886823bbd15edc2dc0908fa214070df61c9f7ab8831f0a03630275cca5a"}, - {file = "torch-1.10.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3eee3cf53c1f8fb3f1fe107a22025a8501fc6440d14e09599ba7153002531f84"}, - {file = "torch-1.10.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ef99b8cca5f9358119b07956915faf6e7906f433ab4a603c160ae9de88918371"}, - {file = "torch-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d43bc3f3a2d89ae185ef96d903c935c335219231e57685658648396984e2a67a"}, - {file = "torch-1.10.2-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:6da1b877880435440a5aa9678ef0f01986d4886416844db1d97ebfb7fd1778d0"}, - {file = "torch-1.10.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ab77a9f838874f295ed5410c0686fa22547456e0116efb281c66ef5f9d46fe28"}, - {file = "torch-1.10.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9ef4c004f9e5168bd1c1930c6aff25fed5b097de81db6271ffbb2e4fb8b89319"}, - {file = "torch-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:376fc18407add20daa6bbaaffc5a5e06d733abe53bcbd60ef2532bfed34bc091"}, - {file = "torch-1.10.2-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:f281438ee99bd72ad65c0bba1026a32e45c3b636bc067fc145ad291e9ea2faab"}, - {file = "torch-1.10.2-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:3592d3dd62b32760c82624e7586222747fe2281240e8653970b35f1d6d4a434c"}, - {file = "torch-1.10.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fbaf18c1b3e0b31af194a9d853e3739464cf982d279df9d34dd18f1c2a471878"}, - {file = "torch-1.10.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:97b7b0c667e8b0dd1fc70137a36e0a4841ec10ef850bda60500ad066bef3e2de"}, - {file = "torch-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:901b52787baeb2e9e1357ca7037da0028bc6ad743f530e0040ae96ef8e27156c"}, - {file = "torch-1.10.2-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:5b68e9108bd7ebd99eee941686046c517cfaac5331f757bcf440fe02f2e3ced1"}, - {file = 
"torch-1.10.2-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:b07ef01e36b716d0d65ca60c4db0ac9d094a0e797d9b55290da4dcda91463b6c"}, -] -torchaudio = [ - {file = "torchaudio-0.10.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:fd7ea7dfe52456621e1fe8d40129d1d1e765a444fd16b43c494732835c23f2b0"}, - {file = "torchaudio-0.10.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6619b0e85bc47e559598c12d98aac7cfeb63e0910c121ef3e0611ff17d3f5753"}, - {file = "torchaudio-0.10.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:728b4bf7e9bb6f4d44b397e6f8ffc74e6588cff7c52cd03e8b76759fa895d46a"}, - {file = "torchaudio-0.10.2-cp36-cp36m-win_amd64.whl", hash = "sha256:e7b1463a7ab1322f0fb0b35b2e5aee6a8bde24709d2c1135b4db5ec4e72a94a8"}, - {file = "torchaudio-0.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f22f1130705015e33e3b40f840cedcaadabab08eb51ee71f15ad27746ce7be06"}, - {file = "torchaudio-0.10.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:557de9a84b4c4b04f83f1ef3abe6d2bc37f4e9ee7bd149b44568d5e3f145edb9"}, - {file = "torchaudio-0.10.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:57ef69529c4307db35f5fd5dd1bf295af1ae4cc5c82d82b87753ebe99ac91332"}, - {file = "torchaudio-0.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd7eb11904696b62a1948cc6bcb75628bfa7830b808b928e362368506997b285"}, - {file = "torchaudio-0.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7be36f12ed5b97a4b774257dba4e5f78f9e84edcd534f28ffdf6892c919aada7"}, - {file = "torchaudio-0.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:05e2f56a310d9914b434e49b4b77483d56ca4820d194123c9838ac61e14455ff"}, - {file = "torchaudio-0.10.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:773db781e7a8bcde8e171121ec0349833ca662e5338025f5f5a4d8846f91cacc"}, - {file = "torchaudio-0.10.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b4a8d319b85e0964f4def2a7a391feb5fcab1c08f71e790941e3826674b345c6"}, - {file = "torchaudio-0.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:e7556773ab4b2bbbb755cd84497db7e7ebf73fe05811ede5c51a560ea05a56b0"}, - {file = "torchaudio-0.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b5663ddd40cee794c8c59cf61c3ee9108832152e11956f766610f92f87f21244"}, - {file = "torchaudio-0.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:677cf720f52af0e2cbde105d8ab79acfdb8c4590880a35796005b6b09da7d767"}, - {file = "torchaudio-0.10.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:98f6ad7d1b7d8546e3f0eab55147a88d55a12c84b5fd3bd9b1516ffb97a5b8ec"}, - {file = "torchaudio-0.10.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ea40d7969693a9be92d2df5db3f2cfacf4b9d696a2770ea3735b8596fd8c82b9"}, - {file = "torchaudio-0.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c09e24489d6ff9765614c6dd7c0a3771ded338f879a9bdadd284a854fb8bf374"}, -] -tqdm = [ - {file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = "sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"}, - {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, -] -transformers = [ - {file = "transformers-4.24.0-py3-none-any.whl", hash = "sha256:b7ab50039ef9bf817eff14ab974f306fd20a72350bdc9df3a858fd009419322e"}, - {file = "transformers-4.24.0.tar.gz", hash = "sha256:486f353a8e594002e48be0e2aba723d96eda839e63bfe274702a4b5eda85559b"}, -] -trec-car-tools = [] -typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = 
"typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = 
"typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, -] -typer = [ - {file = "typer-0.4.2-py3-none-any.whl", hash = "sha256:023bae00d1baf358a6cc7cea45851639360bb716de687b42b0a4641cd99173f1"}, - {file = "typer-0.4.2.tar.gz", hash = "sha256:b8261c6c0152dd73478b5ba96ba677e5d6948c715c310f7c91079f311f62ec03"}, -] -types-requests = [ - {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"}, - {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, -] -types-urllib3 = [ - {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, - {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -ujson = [ - {file = "ujson-5.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff4928dc1e9704b567171c16787238201fdbf023665573c12c02146fe1e02eec"}, - {file = "ujson-5.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1dc2f46c31ef22b0aaa28cd71be897bea271e700636658d573df9c43c49ebbd0"}, - {file = "ujson-5.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6019e3480d933d3698f2ecb4b46d64bfadd64e718f04fac36e681f3254b49a93"}, - {file = "ujson-5.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5179088ef6487c475604b7898731a6ddeeada7702cfb2162155b016703a8475"}, - {file = "ujson-5.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c04ae27e076d81a3839047d8eed57c1e17e361640616fd520d752375e3ba8f0c"}, - {file = "ujson-5.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:60a4b481978ea2aad8fe8af1ecc271624d01b3cf4b09e9b643dd2fe19c07634c"}, - {file = "ujson-5.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a09d203983104918c62f2eef9406f24c355511f9217967df23e70fa7f5b54ff"}, - {file = "ujson-5.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b9812638d7aa8ecda2e8e1513fb4da999249603bffab7439a5f8f0bb362b0db"}, - {file = "ujson-5.5.0-cp310-cp310-win32.whl", hash = "sha256:33cd9084fefc74cbacf88c92fd260b61211e00bcde38d640c369e5dc34a2b4e1"}, - {file = "ujson-5.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:765d46f3d5e7a1d48075035e2d1a9164f683e3fccde834ca04602e6c588835bc"}, - {file = "ujson-5.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:278aa9d7cb56435c96d19f5d702e026bcf69f824e24b41e9b52706abd3565837"}, - {file = "ujson-5.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9585892091ae86045135d6a6129a644142d6a51b23e1428bb5de6d10bc0ce0c7"}, - {file = "ujson-5.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9cdc46859024501c20ab74ad542cdf2f08b94b5ce384f2f569483fa3ed926d04"}, - {file = "ujson-5.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5bea13c73f36c4346808df3fa806596163a7962b6d28001ca2a391cab856089"}, - {file = "ujson-5.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3f4240d99d55eb97cb012e9adf401f5ed9cd827af0341ac44603832202b0d2"}, - {file = "ujson-5.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d93940664a5ccfd79f72dcb939b0c31a3479889f14f0eb95ec52976f8c0cae7d"}, - {file = "ujson-5.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:880c84ce59f49776cf120f77e7ca04877c97c6887917078dbc369eb47004d7cf"}, - {file = "ujson-5.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:977bf5be704a88d46bf5b228df8b44521b1f3119d741062191608b3a6a38f224"}, - {file = "ujson-5.5.0-cp311-cp311-win32.whl", hash = "sha256:e0b36257dc90194784531c3b922d8d31fb2b4d8e5adfd27aff4eee7174176365"}, - {file = "ujson-5.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:a34a5f034b339f69ef7f6a134c22d04b92e07b6ddc1dd65382e7e4ec65d6437d"}, - {file = "ujson-5.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f26544bc10c83a2ff9aa2e093500c1b473f327faae31fb468d591e5823333376"}, - {file = "ujson-5.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fd797a4837ba10671954e7c09010cec7aca67e09d193f4920a16beea5f66f65"}, - {file = "ujson-5.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d7cfac2547c93389fa303fc0c0eb6698825564e8389c41c9b60009c746207b6"}, - {file = "ujson-5.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4875cafc9a6482c04c7df52a725d1c41beb74913c0ff4ec8f189f1954a2afe9"}, - {file = "ujson-5.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0762a4fdf86e01f3f8d8b6b7158d01fdd870799ff3f402b676e358fcd879e7eb"}, - {file = "ujson-5.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6c7ae6e0778ab9610f5e80e0595957d101ab8de18c32a8c053a19943ef4831d0"}, - {file = "ujson-5.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:94874584b733a18b310b0e954d53168e62cd4a0fd9db85b1903f0902a7eb33e8"}, - {file = "ujson-5.5.0-cp37-cp37m-win32.whl", hash = "sha256:3b74467564814fbce322427a5664e6bcc7dae6dbc8acbef76300fe43ca4072ab"}, - {file = "ujson-5.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:59cdcd934385f36e8bd76aedc234371cc75c848d95bdce804ac8aa8744cfeffa"}, - {file = "ujson-5.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2e506ecf89b6b9d304362ccef770831ec242a52c89dab1b4aabf1ab0eb1d5ed6"}, - {file = "ujson-5.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10095160dbe6bba8059ad6677a01da251431f4c68041bf796dcac0956b34f8f7"}, - {file = "ujson-5.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5035bb997d163f346c22abcec75190e7e756a5349e7c708bd3d5fd7066a9a854"}, - {file = "ujson-5.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d12f2d2df195c8c4e49d2cdbad640353a856c62ca2c624d8b47aa33b65a2a2"}, - {file = "ujson-5.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a485117f97312bef45f5d79d2ff97eff4da503b8a04f3691f59d31141686459"}, - {file = "ujson-5.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:21678d7e068707e4d54bdfeb8c250ebc548b51e499aed778b22112ca31a79669"}, - {file = "ujson-5.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:5a9b1320d8363a42d857fae8065a2174d38217cdd58cd8dc4f48d54e0591271e"}, - {file = "ujson-5.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:701e81e047f5c0cffd4ac828efca68b0bd270c616654966a051e9a5f836b385e"}, - {file = "ujson-5.5.0-cp38-cp38-win32.whl", hash = "sha256:1cef44ea4973344baed3d50a5da4a8843de3a6af7dea7fadf0a594e53ce5892f"}, - {file = "ujson-5.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e510d288e613d6927796dfb728e13e4530fc83b9ccac5888a21f7860486eab21"}, - {file = "ujson-5.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e1135264bcd40965cd35b0869e36952f54825024befdc7a923df9a7d83cfd800"}, - {file = "ujson-5.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:703fd69d9cb21d6ec2086789df9be2cf8140a76ff127050c24007ea8940dcd3b"}, - {file = "ujson-5.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:849f2ff40264152f25589cb48ddb4a43d14db811f841ec73989bfc0c8c4853fa"}, - {file = "ujson-5.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf416a93e1331820c77e3429df26946dbd4fe105e9b487cd2d1b7298b75784a8"}, - {file = "ujson-5.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:593a0f6fb0e186c5ba65465ed6f6215a30d1efa898c25e74de1c8577a1bff6d0"}, - {file = "ujson-5.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c20cc83b0df47129ec6ed8a47fa7dcfc309c5bad029464004162738502568bb"}, - {file = "ujson-5.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6f83be8257b2f2dd6dea5ee62cd28db90584da7a7af1fba77a2102fc7943638a"}, - {file = "ujson-5.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8141f654432cf75144d6103bfac2286b8adf23467201590b173a74535d6be22d"}, - {file = "ujson-5.5.0-cp39-cp39-win32.whl", hash = "sha256:3fe1aea596f9539fc20cd9e52f098c842afc090168824fd4ca9744fe13151a03"}, - {file = "ujson-5.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a655f7b755cfc5c07f2116b6dcf0ba148c89adef9a6d40c1b0f1fada878c4345"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f19f11055ba2961eb39bdb1ff15763a53fca4fa0b5b624da3c7a528e83cdd09c"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d87c817b292efb748f1974f37e8bb8a8772ef92f05f84e507159360814bcc3f"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9681ec4c60d0da590552427d770636d9079038c30b265f507ccde23caa7823"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f63d1ae1ca17bb2c847e298c7bcf084a73d56d434b4c50509fb93a4b4300b0b2"}, - {file = "ujson-5.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:95603eff711b8f3b9596e1c961dbeb745a792ba1904141612f194e07edd71e5f"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2d90414e3b4b44b39825049185959488e084ea7fcaf6124afd5c00893938b09d"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7471d4486f23518cff343f1eec6c68d1b977ed74c3e6cc3e1ac896b9b7d68645"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee9a2c9a4b2421e77f8fe33ed0621dea03c66c710707553020b1e32f3afb6240"}, - {file = "ujson-5.5.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a8cb3c8637006c5bd8237ebb5992a76ba06e39988ad5cff2096227443e8fd6a"}, - {file = 
"ujson-5.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d9c89c521dc90c7564358e525f849b93ad1d710553c1491f66b8cce8113bc901"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ab011e3556a9a1d9461bd686870c527327765ed02fe53550531d6609a8a33ff"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:603607f56a0ee84d9cd2c7e9b1d29b18a70684b94ee34f07b9ffe8dc9c8a9f81"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75bef34e69e7effb7b4849e3f830e3174d2cc6ec7273503fdde111c222dc9b3"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abfe83e082c9208891e2158c1b5044a650ecec408b823bf6bf16cd7f8085cafa"}, - {file = "ujson-5.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4ef4ab8352861b99bd7fedb1fc6df3ea7f7d5216c789ba6d859e4ea06f1a4c45"}, - {file = "ujson-5.5.0.tar.gz", hash = "sha256:b25077a971c7da47bd6846a912a747f6963776d90720c88603b1b55d81790780"}, -] -urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, -] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] -werkzeug = [ - {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, - {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, -] -wget = [ - {file = "wget-3.2.zip", hash = "sha256:35e630eca2aa50ce998b9b1a127bb26b30dfee573702782aa982f875e3f16061"}, -] -wheel = [ - {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, - {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, -] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] -xxhash = [ - {file = "xxhash-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e9141f224a4dc984ea016744aa40a8a040054ef91933b2f9c81ba18e5b9d06e"}, - {file = "xxhash-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b8810fa72d361262168c2b215e3cee223eb19b74806c08713b943f57f0c91fd6"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb07bb4125c432f500a76a84ef51c0eafc09afbd1479308c6e1e2bbb73a33bb4"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4582e12d9aa25530449a8cad4e9e8e973e0b2f28e77ef6504fc6f216f8f07406"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72b5d4eee98ecd25a2c647f7547a024585400ab13aa7ec837ebb8a25151bbef"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f50cfc3e80fe241d25e557f7ca5a145d3d557bdf738cd2d355bfe1324c28d21"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90d03b46fc06f9ca3c0a4db685df3efffeb880ebcef2ffee707057e09fb8cba2"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:540966f42ccd0d3d09539a7236fbfdce6b15d7be49ee5d5adaef0aa0d020cd1e"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2d65d773c4d9c8d1a88af8e0e2169910cfc3d425006e2eb18cd13a6391543ed1"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3a9e2052ac6520e1f56630ff689b2b85ccd24799d362493435cf46defe163cc1"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:77c6d1e0993e8a314f4a6aec911c12fbb4caf4f58223381d3d41fa153ae6924f"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aa4e22665290f2dfa608901c38b1a5f0d367280fd8adc5996356d7f4993f41f7"}, - {file = "xxhash-3.1.0-cp310-cp310-win32.whl", hash = "sha256:5c65cfb8fd3efd3d574a0cd4abbe59741f720334fa1d79e5366b34b0f4584b66"}, - {file = "xxhash-3.1.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:8f32770a4b39ffe6a5785f87b66435b2e4048ba4a36334108ac5d375447ce667"}, - {file = "xxhash-3.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:effd26669365a71e1337977ba789c95029c9cb0ac26e7455255922d3c9ff8fff"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2324c31095708ceb7ee8c15b31bd1bea7376ca477748f9a20aba2e94d01fab1"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1980f79c8ab4536c01048eb6398db0ac2049292150064bef665fa4c89918f86c"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11d4af8d50b8b08835f653a96d58bb3658454144e5e4d28e369f4b3ad2bff4ea"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a7b89e0b8b26cb86369ca0a7395299e0046930664ce96cbc07702504af9a26"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca24dd052adf23e1fd8fb5839d9046328e60222a866fa3c2761e90ddab1fc2b8"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f7f1b27db4798f7ebe599107c75b2a0648fc1f9d9226fa2771fc296c5593dc7e"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e24bab9aecdfba23c7feb3b173488ca6b3168a50095ff544dedc7caa0c05ac3c"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:cb3032588cd46bc6d16b2b35cd7ff5041fcc90423ae7c8f62263a029ff8f1e5d"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:b9c56f45fd18879212b480dc44dc1da44a22d453e3b4038c4b686f6307124220"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:00f2603b91fbb6fd1c3b83b263a405834e2df393fd2bac6a86e2e0ecc511076c"}, - {file = "xxhash-3.1.0-cp36-cp36m-win32.whl", hash = "sha256:33f865b6eb9126a60345cf3106925a5039ef582b840d2df96f7777a160d0ef17"}, - {file = "xxhash-3.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:95175636d28943eaf3da331aa57c7d02756017880151e11f8476a2ef49dd35de"}, - {file = "xxhash-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b43b55e0d2d81f4ec8caaf165a0a96325d7dd4317770b663701700f9aee855ed"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:694fd42161959382b70ea3442ea017675071dafe8b960849d5a599c4538737d8"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a3d7cd6412a1d0d5e43be188a67f95326e5a8c5d2ae1ad10adf8f896e630091"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ab8f652ffaed3855d25f7666f63bf1ee81ead4d9d30cc9e592571a3959d2964"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2259f13de36b4e675840e50b16bcd5c6f7aec6f5e833af47b3a0186c19e92dd"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e86290486768b51478f320abca9fe61805578298b6e60719ec23bca85c60eec"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d08ebd5313f6811ef76cde0f97179374b047442b918089a09019fed53b9f9cef"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2e798287d6efcd51df12ac67f65ba7d78937be80c2c91bff2d17bf5404c63a24"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:91bf72f009114320d9cbb452d5995286f2c6f70b3f53041f72654c4c1a8b79bd"}, - {file = 
"xxhash-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d30df80c8bc56aa55f545b1840d84ad6f773a3623b3e1462f17ebbd93c4c69ae"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d4f9c8570e9adecae7d480090bcbf856b1d28f462c30c5cbe9f23b6418d6423"}, - {file = "xxhash-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:0bce4ce403129c6cee69860cf2597d04f29c714797c11e8ec3b2b7b3677c4586"}, - {file = "xxhash-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cf20bca461ae45273599be8635b60b517d2212b51d6d5d85fc8c441078eb02ab"}, - {file = "xxhash-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a9efbc1c780ef3b578486eb250f5e93b2934c918386d981d96b7a06bae90c4d4"}, - {file = "xxhash-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b16e7fc7991118c0b6cd9f5e141911110388f39df58b2996834619d2b956b4a8"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0f1e298f80e302fd1b859e0b27f997eae82e9e9592843a1df2ca79122365ac1"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4645b106732885fc488656657a5790dee4d8ffd123d2134647028f6575f2c05e"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a6176152a64955b3dff89dfb2d3c9a7c93e862cbc37c0858e8e25d1f3f3c"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd65c4850231324af7a613e5647c1c484f3dcbcde4a0e608d099050c684ae79"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b6cd0f781c198b0b53f78124658d0f407fbba7450e79d537505608bf4125ba"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ae86b9672ad4ef431b0e1d284530289382575e2569078071c7adcf5827b4995"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3cb1c759c2a863dd963bdca8504c7ae39388dd1ef189fca91b94f18acb7bde26"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e5d54d8e01c34acf3361bb58c5022a92abc9d5054b919a1d483679d59989bbff"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:362520c908887c85af832e16448bad58cb182d165a16241e3251bdd17bd427be"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6502a60ced52547e9c8e3eba3e5818ac0adca3e9abb5e32d2ee73a060f3dc362"}, - {file = "xxhash-3.1.0-cp38-cp38-win32.whl", hash = "sha256:4143a1ad5f436550fcc091c80e7af23ec31cca1991750391067b24b051dcd0d7"}, - {file = "xxhash-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:04812deabfdba3fa9cae57abb908a3f980bccbe9a4178f3e56afca2f1f625874"}, - {file = "xxhash-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:53e587e6625bc9a7cfa5e067837590a626ff4150000ae31be2af73a67d08ea8c"}, - {file = "xxhash-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:778f3c6007d280b6fff12f95c81d9c4ad6907632a0bfecf23aca18afb54319c0"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc93e4bd34fd14459672345ca4a070b0f28d906bea4b178373b4271498e38ec9"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65a9aac88cc960b3a21a52922d5846f1b15af7a5b937a26c7edee1d3fe80800c"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b74a787be31b8493797d3e7dfac2b240ed443bcd1b42dfbb406629538f103667"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:21e030031f13455c9bfc10ed58301fbee8fad0e179cc6a1b15e899d71af2958f"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c627840400b90a90d989ebef431b146e89e44377de42cd2e75996bbed394e3c5"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:62cc09f3024ef1a0153e32ac6593025f20fae13b1bc5d08b639891ec110cacec"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:de9758872881d50d0946dfc6067b4782986de6af5ec74c266d47d85c699aa0de"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:216b90f30351fe434903bb039ae88df4c5ae43eb4320a012f6c73bec1d630213"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:84e9d14baccdf31e3c59ed34b9d00df52ad4db376dbbbaad936ea02b9be4a534"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f4089c92b7aac2ead222345b0368e7d69e7a61e7a56762ae2f5e8d67fb67349"}, - {file = "xxhash-3.1.0-cp39-cp39-win32.whl", hash = "sha256:dfa73020bc696a46dab2dddd28c76d1abcd0643dc8a2dd06a037392bda5cc5ec"}, - {file = "xxhash-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0602b45447934fd5b81b387e76512a7c7c946b571b3f9a7d7b2cd9d3a09f9041"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a06311c247f2f45619e754249ca6f868c349fbfb63979ce291c83151840952a2"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f109a872aba254ffebe3c749a6b7148463e5d3168ac5afb515f1a929e73feb8f"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4521fed12d111bb2691ca0dc01fa8b36f6c626f53d9ee54befcea957e1b4dbaa"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c60ff7946e46beaa873509c1ca41937b40fc3048620cbd8441bfe03aa053f33"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:379c37f2d46a441bdb71af79443990e21943ef644ffeed5662157a9a682d55be"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:daa1a37685759003680bd2775053bbf772c4f71ad3c729810ea4901535635d5e"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c58b2bc7240966f54db9ef7dcfcc988362b0a315c12ed13a778917457c8dfe9d"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efafa866662f6ab50f1ffb808424ca9373d2f3b4a73e6ea66432dce1779f501c"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea074722fa953a1a3bece979620e2f0b43f2dfca841de84aca32a477c2fdb658"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:369af334d58f6d2f716bd1557d3580c4c1235077090769abf1d54daec2b301a7"}, - {file = "xxhash-3.1.0.tar.gz", hash = "sha256:ac21b1e21dc6fdfee9a57b53f4777539d53a84f2e1546a3f802f159f9966bdc1"}, -] -yarl = [ - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"}, - {file = 
"yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"}, - {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"}, - {file = "yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"}, - {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"}, - {file 
= "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"}, - {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"}, - {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"}, - {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"}, - {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"}, - {file = "yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"}, - {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"}, -] -zipp = [ - {file = "zipp-3.10.0-py3-none-any.whl", hash = "sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"}, - {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"}, -] -zstandard = [ - {file = "zstandard-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a65e0119ad39e855427520f7829618f78eb2824aa05e63ff19b466080cd99210"}, - {file = "zstandard-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fa496d2d674c6e9cffc561639d17009d29adee84a27cf1e12d3c9be14aa8feb"}, - {file = "zstandard-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f7c68de4f362c1b2f426395fe4e05028c56d0782b2ec3ae18a5416eaf775576"}, - {file = "zstandard-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a7a716bb04b1c3c4a707e38e2dee46ac544fff931e66d7ae944f3019fc55b8"}, - {file = "zstandard-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:72758c9f785831d9d744af282d54c3e0f9db34f7eae521c33798695464993da2"}, - {file = "zstandard-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04c298d381a3b6274b0a8001f0da0ec7819d052ad9c3b0863fe8c7f154061f76"}, - {file = "zstandard-0.19.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aef0889417eda2db000d791f9739f5cecb9ccdd45c98f82c6be531bdc67ff0f2"}, - {file = "zstandard-0.19.0-cp310-cp310-win32.whl", hash = "sha256:9d97c713433087ba5cee61a3e8edb54029753d45a4288ad61a176fa4718033ce"}, - {file = "zstandard-0.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:81ab21d03e3b0351847a86a0b298b297fde1e152752614138021d6d16a476ea6"}, - {file = "zstandard-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:593f96718ad906e24d6534187fdade28b611f8ed06e27ba972ba48aecec45fc6"}, - {file = "zstandard-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e21032efe673b887464667d09406bab6e16d96b09ad87e80859e3a20b6745b6"}, - {file = "zstandard-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:876567136b0359f6581ecd892bdb4ca03a0eead0265db73206c78cff03bcdb0f"}, - {file = "zstandard-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9087571729c968cd853d54b3f6e9d0ec61e45cd2c31e0eb8a0d4bdbbe6da2f"}, - {file = "zstandard-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8371217dff635cfc0220db2720fc3ce728cd47e72bb7572cca035332823dbdfc"}, - {file = "zstandard-0.19.0-cp311-cp311-win32.whl", hash = "sha256:126aa8433773efad0871f624339c7984a9c43913952f77d5abeee7f95a0c0860"}, - {file = "zstandard-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:0fde1c56ec118940974e726c2a27e5b54e71e16c6f81d0b4722112b91d2d9009"}, - {file = "zstandard-0.19.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:898500957ae5e7f31b7271ace4e6f3625b38c0ac84e8cedde8de3a77a7fdae5e"}, - {file = "zstandard-0.19.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660b91eca10ee1b44c47843894abe3e6cfd80e50c90dee3123befbf7ca486bd3"}, - {file = "zstandard-0.19.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55b3187e0bed004533149882ef8c24e954321f3be81f8a9ceffe35099b82a0d0"}, - {file = "zstandard-0.19.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6d2182e648e79213b3881998b30225b3f4b1f3e681f1c1eaf4cacf19bde1040d"}, - {file = "zstandard-0.19.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ec2c146e10b59c376b6bc0369929647fcd95404a503a7aa0990f21c16462248"}, - {file = "zstandard-0.19.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67710d220af405f5ce22712fa741d85e8b3ada7a457ea419b038469ba379837c"}, - {file = "zstandard-0.19.0-cp36-cp36m-win32.whl", hash = "sha256:f097dda5d4f9b9b01b3c9fa2069f9c02929365f48f341feddf3d6b32510a2f93"}, - {file = "zstandard-0.19.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f4ebfe03cbae821ef994b2e58e4df6a087470cc522aca502614e82a143365d45"}, - {file = "zstandard-0.19.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b80f6f6478f9d4ca26daee6c61584499493bf97950cfaa1a02b16bb5c2c17e70"}, - {file = "zstandard-0.19.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909bdd4e19ea437eb9b45d6695d722f6f0fd9d8f493e837d70f92062b9f39faf"}, - {file = "zstandard-0.19.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c90a44470f2999779057aeaf33461cbd8bb59d8f15e983150d10bb260e16e0"}, - {file = "zstandard-0.19.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:401508efe02341ae681752a87e8ac9ef76df85ef1a238a7a21786a489d2c983d"}, - {file = "zstandard-0.19.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47dfa52bed3097c705451bafd56dac26535545a987b6759fa39da1602349d7ba"}, - {file = "zstandard-0.19.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1a4fb8b4ac6772e4d656103ccaf2e43e45bd16b5da324b963d58ef360d09eb73"}, - {file = "zstandard-0.19.0-cp37-cp37m-win32.whl", hash = "sha256:d63b04e16df8ea21dfcedbf5a60e11cbba9d835d44cb3cbff233cfd037a916d5"}, - {file = "zstandard-0.19.0-cp37-cp37m-win_amd64.whl", hash = "sha256:74c2637d12eaacb503b0b06efdf55199a11b1d7c580bd3dd9dfe84cac97ef2f6"}, - {file = "zstandard-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2e4812720582d0803e84aefa2ac48ce1e1e6e200ca3ce1ae2be6d410c1d637ae"}, - {file = "zstandard-0.19.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4514b19abe6dbd36d6c5d75c54faca24b1ceb3999193c5b1f4b685abeabde3d0"}, - {file = "zstandard-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6caed86cd47ae93915d9031dc04be5283c275e1a2af2ceff33932071f3eeff4d"}, - {file = "zstandard-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ccc4727300f223184520a6064c161a90b5d0283accd72d1455bcd85ec44dd0d"}, - {file = "zstandard-0.19.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:879411d04068bd489db57dcf6b82ffad3c5fb2a1fdd30817c566d8b7bedee442"}, - {file = "zstandard-0.19.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c9ca56345b0c5574db47560603de9d05f63cce5dfeb3a456eb60f3fec737ff2"}, - {file = "zstandard-0.19.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d777d239036815e9b3a093fa9208ad314c040c26d7246617e70e23025b60083a"}, - {file = "zstandard-0.19.0-cp38-cp38-win32.whl", hash = "sha256:be6329b5ba18ec5d32dc26181e0148e423347ed936dda48bf49fb243895d1566"}, - {file = "zstandard-0.19.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d5bb598963ac1f1f5b72dd006adb46ca6203e4fb7269a5b6e1f99e85b07ad38"}, - {file = "zstandard-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:619f9bf37cdb4c3dc9d4120d2a1003f5db9446f3618a323219f408f6a9df6725"}, - {file = "zstandard-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b253d0c53c8ee12c3e53d181fb9ef6ce2cd9c41cbca1c56a535e4fc8ec41e241"}, - {file = "zstandard-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c927b6aa682c6d96225e1c797f4a5d0b9f777b327dea912b23471aaf5385376"}, - {file = "zstandard-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f01b27d0b453f07cbcff01405cdd007e71f5d6410eb01303a16ba19213e58e4"}, - {file = "zstandard-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c7560f622e3849cc8f3e999791a915addd08fafe80b47fcf3ffbda5b5151047c"}, - {file = "zstandard-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e892d3177380ec080550b56a7ffeab680af25575d291766bdd875147ba246a91"}, - {file = "zstandard-0.19.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60a86b7b2b1c300779167cf595e019e61afcc0e20c4838692983a921db9006ac"}, - {file = "zstandard-0.19.0-cp39-cp39-win32.whl", hash = "sha256:755020d5aeb1b10bffd93d119e7709a2a7475b6ad79c8d5226cea3f76d152ce0"}, - {file = "zstandard-0.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:55a513ec67e85abd8b8b83af8813368036f03e2d29a50fc94033504918273980"}, - {file = "zstandard-0.19.0.tar.gz", hash = "sha256:31d12fcd942dd8dbf52ca5f6b1bbe287f44e5d551a081a983ff3ea2082867863"}, -] diff --git a/workers/splits/poetry.toml b/workers/splits/poetry.toml deleted file mode 100644 index 5fcef8cd..00000000 --- a/workers/splits/poetry.toml +++ /dev/null @@ -1,3 +0,0 @@ -[virtualenvs] -in-project = true -prefer-active-python = true diff --git a/workers/splits/src/splits/__init__.py b/workers/splits/src/splits/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/workers/splits/src/splits/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# 
SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. diff --git a/workers/splits/src/splits/config.py b/workers/splits/src/splits/config.py deleted file mode 100644 index 9ee54c7e..00000000 --- a/workers/splits/src/splits/config.py +++ /dev/null @@ -1,42 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import datasets.config -from datasets.utils.logging import log_levels, set_verbosity -from libcommon.config import ( - CacheConfig, - CommonConfig, - ProcessingGraphConfig, - QueueConfig, - WorkerConfig, -) - - -class AppConfig: - cache: CacheConfig - common: CommonConfig - processing_graph: ProcessingGraphConfig - queue: QueueConfig - worker: WorkerConfig - - def __init__(self): - # First process the common configuration to setup the logging - self.common = CommonConfig() - self.cache = CacheConfig() - self.queue = QueueConfig() - self.processing_graph = ProcessingGraphConfig() - self.worker = WorkerConfig() - self.setup() - - def setup(self): - # Ensure the datasets library uses the expected HuggingFace endpoint - datasets.config.HF_ENDPOINT = self.common.hf_endpoint - # Don't increase the datasets download counts on huggingface.co - datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False - # Set logs from the datasets library to the least verbose - set_verbosity(log_levels["critical"]) - - # Note: self.common.hf_endpoint is ignored by the huggingface_hub library for now (see - # the discussion at https://github.com/huggingface/datasets/pull/5196), and this breaks - # various of the datasets functions. The fix, for now, is to set the HF_ENDPOINT - # environment variable to the desired value. diff --git a/workers/splits/src/splits/main.py b/workers/splits/src/splits/main.py deleted file mode 100644 index f1c40282..00000000 --- a/workers/splits/src/splits/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from splits.config import AppConfig -from splits.worker import SplitsWorker - -if __name__ == "__main__": - app_config = AppConfig() - SPLITS_ENDPOINT = "/splits" - SplitsWorker(app_config=app_config, endpoint=SPLITS_ENDPOINT).loop() diff --git a/workers/splits/tests/__init__.py b/workers/splits/tests/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/workers/splits/tests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. diff --git a/workers/splits/tests/conftest.py b/workers/splits/tests/conftest.py deleted file mode 100644 index 827efefe..00000000 --- a/workers/splits/tests/conftest.py +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -from pytest import MonkeyPatch, fixture - -from splits.config import AppConfig - -# Import fixture modules as plugins -pytest_plugins = ["tests.fixtures.datasets", "tests.fixtures.files", "tests.fixtures.hub"] - - -# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 -@fixture(scope="session") -def monkeypatch_session(hf_endpoint: str, hf_token: str): - monkeypatch_session = MonkeyPatch() - monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") - monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") - monkeypatch_session.setenv("COMMON_HF_ENDPOINT", hf_endpoint) - monkeypatch_session.setenv("COMMON_HF_TOKEN", hf_token) - yield monkeypatch_session - monkeypatch_session.undo() - - -@fixture(scope="session", autouse=True) -def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: - app_config = AppConfig() - if "test" not in app_config.cache.mongo_database or "test" not in app_config.queue.mongo_database: - raise ValueError("Test must be launched on a test mongo database") - return app_config diff --git a/workers/splits/tests/fixtures/__init__.py b/workers/splits/tests/fixtures/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/workers/splits/tests/fixtures/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. diff --git a/workers/splits/tests/fixtures/data/test_image_rgb.jpg b/workers/splits/tests/fixtures/data/test_image_rgb.jpg deleted file mode 100644 index e131e8ec..00000000 Binary files a/workers/splits/tests/fixtures/data/test_image_rgb.jpg and /dev/null differ diff --git a/workers/splits/tests/fixtures/datasets.py b/workers/splits/tests/fixtures/datasets.py deleted file mode 100644 index e397c2ec..00000000 --- a/workers/splits/tests/fixtures/datasets.py +++ /dev/null @@ -1,24 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from typing import Any, Mapping - -import pytest -from datasets import Audio, Dataset, Features -from datasets.features.features import FeatureType - - -def other(content: Any, feature_type: FeatureType = None) -> Dataset: - return ( - Dataset.from_dict({"col": [content]}) - if feature_type is None - else Dataset.from_dict({"col": [content]}, features=Features({"col": feature_type})) - ) - - [email protected](scope="session") -def datasets() -> Mapping[str, Dataset]: - sampling_rate = 16_000 - return { - "audio": other({"array": [0.1, 0.2, 0.3], "sampling_rate": sampling_rate}, Audio(sampling_rate=sampling_rate)), - } diff --git a/workers/splits/tests/fixtures/files.py b/workers/splits/tests/fixtures/files.py deleted file mode 100644 index 4a2dd290..00000000 --- a/workers/splits/tests/fixtures/files.py +++ /dev/null @@ -1,24 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -import csv - -import pytest - -DATA = [ - {"col_1": "0", "col_2": 0, "col_3": 0.0}, - {"col_1": "1", "col_2": 1, "col_3": 1.0}, - {"col_1": "2", "col_2": 2, "col_3": 2.0}, - {"col_1": "3", "col_2": 3, "col_3": 3.0}, -] - - [email protected](scope="session") -def csv_path(tmp_path_factory: pytest.TempPathFactory) -> str: - path = str(tmp_path_factory.mktemp("data") / "dataset.csv") - with open(path, "w", newline="") as f: - writer = csv.DictWriter(f, fieldnames=["col_1", "col_2", "col_3"]) - writer.writeheader() - for item in DATA: - writer.writerow(item) - return path diff --git a/workers/splits/tests/fixtures/hub.py b/workers/splits/tests/fixtures/hub.py deleted file mode 100644 index b9032510..00000000 --- a/workers/splits/tests/fixtures/hub.py +++ /dev/null @@ -1,329 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -# Adapted from https://github.com/huggingface/datasets/blob/main/tests/fixtures/hub.py - -import time -from contextlib import contextmanager, suppress -from pathlib import Path -from typing import Any, Iterable, List, Mapping, Optional, Tuple, TypedDict - -import datasets.config -import pytest -import requests -from datasets import Dataset -from huggingface_hub.hf_api import ( - REPO_TYPES, - REPO_TYPES_URL_PREFIXES, - HfApi, - hf_raise_for_status, -) - - -def get_default_config_split(dataset: str) -> Tuple[str, str, str]: - config = dataset.replace("/", "--") - split = "train" - return dataset, config, split - - -# see https://github.com/huggingface/moon-landing/blob/main/server/scripts/staging-seed-db.ts -CI_HUB_USER = "__DUMMY_DATASETS_SERVER_USER__" -CI_HUB_USER_API_TOKEN = "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" - -CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co" -CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE = CI_HUB_ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}" - - [email protected](autouse=True) -def ci_hfh_hf_hub_url(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr( - "huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE", CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE - ) - - -# Ensure the datasets library uses the expected HuggingFace endpoint -datasets.config.HF_ENDPOINT = CI_HUB_ENDPOINT -# Don't increase the datasets download counts on huggingface.co -datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False - - -def update_repo_settings( - hf_api: HfApi, - repo_id: str, - *, - private: Optional[bool] = None, - gated: Optional[bool] = None, - token: Optional[str] = None, - organization: Optional[str] = None, - repo_type: Optional[str] = None, - name: str = None, -) -> Mapping[str, bool]: - """Update the settings of a repository. - Args: - repo_id (`str`, *optional*): - A namespace (user or an organization) and a repo name separated - by a `/`. - <Tip> - Version added: 0.5 - </Tip> - private (`bool`, *optional*, defaults to `None`): - Whether the repo should be private. - gated (`bool`, *optional*, defaults to `None`): - Whether the repo should request user access. - token (`str`, *optional*): - An authentication token (See https://huggingface.co/settings/token) - repo_type (`str`, *optional*): - Set to `"dataset"` or `"space"` if uploading to a dataset or - space, `None` or `"model"` if uploading to a model. Default is - `None`. - Returns: - The HTTP response in json. - <Tip> - Raises the following errors: - - [`~huggingface_hub.utils.RepositoryNotFoundError`] - If the repository to download from cannot be found. This may be because it doesn't exist, - or because it is set to `private` and you do not have access. 
- </Tip> - """ - if repo_type not in REPO_TYPES: - raise ValueError("Invalid repo type") - - organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id) - - if organization is None: - namespace = hf_api.whoami(token)["name"] - else: - namespace = organization - - path_prefix = f"{hf_api.endpoint}/api/" - if repo_type in REPO_TYPES_URL_PREFIXES: - path_prefix += REPO_TYPES_URL_PREFIXES[repo_type] - - path = f"{path_prefix}{namespace}/{name}/settings" - - json = {} - if private is not None: - json["private"] = private - if gated is not None: - json["gated"] = gated - - r = requests.put( - path, - headers={"authorization": f"Bearer {token}"}, - json=json, - ) - hf_raise_for_status(r) - return r.json() - - [email protected](scope="session") -def hf_api(): - return HfApi(endpoint=CI_HUB_ENDPOINT) - - [email protected](scope="session") -def hf_token() -> str: - return CI_HUB_USER_API_TOKEN - - [email protected](scope="session") -def hf_endpoint() -> str: - return CI_HUB_ENDPOINT - - [email protected] -def cleanup_repo(hf_api: HfApi): - def _cleanup_repo(repo_id): - hf_api.delete_repo(repo_id=repo_id, token=CI_HUB_USER_API_TOKEN, repo_type="dataset") - - return _cleanup_repo - - [email protected] -def temporary_repo(cleanup_repo): - @contextmanager - def _temporary_repo(repo_id): - try: - yield repo_id - finally: - cleanup_repo(repo_id) - - return _temporary_repo - - -def create_unique_repo_name(prefix: str, user: str) -> str: - repo_name = f"{prefix}-{int(time.time() * 10e3)}" - return f"{user}/{repo_name}" - - -def create_hub_dataset_repo( - *, - hf_api: HfApi, - hf_token: str, - prefix: str, - file_paths: List[str] = None, - dataset: Dataset = None, - private=False, - gated=False, - user=CI_HUB_USER, -) -> str: - repo_id = create_unique_repo_name(prefix, user) - if dataset is not None: - dataset.push_to_hub(repo_id=repo_id, private=private, token=hf_token, embed_external_files=True) - else: - hf_api.create_repo(repo_id=repo_id, token=hf_token, repo_type="dataset", private=private) - if gated: - update_repo_settings(hf_api, repo_id, token=hf_token, gated=gated, repo_type="dataset") - if file_paths is not None: - for file_path in file_paths: - hf_api.upload_file( - token=hf_token, - path_or_fileobj=file_path, - path_in_repo=Path(file_path).name, - repo_id=repo_id, - repo_type="dataset", - ) - return repo_id - - -# https://docs.pytest.org/en/6.2.x/fixture.html#yield-fixtures-recommended [email protected](scope="session", autouse=True) -def hub_public_empty(hf_api: HfApi, hf_token: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="empty") - yield repo_id - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") - - [email protected](scope="session", autouse=True) -def hub_public_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="csv", file_paths=[csv_path]) - yield repo_id - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") - - [email protected](scope="session", autouse=True) -def hub_private_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo( - hf_api=hf_api, hf_token=hf_token, prefix="csv_private", file_paths=[csv_path], private=True - ) - yield repo_id - with suppress(requests.exceptions.HTTPError, ValueError): - 
hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") - - [email protected](scope="session", autouse=True) -def hub_gated_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: - repo_id = create_hub_dataset_repo( - hf_api=hf_api, hf_token=hf_token, prefix="csv_gated", file_paths=[csv_path], gated=True - ) - yield repo_id - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") - - [email protected](scope="session", autouse=True) -def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: - repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="audio", dataset=datasets["audio"]) - yield repo_id - with suppress(requests.exceptions.HTTPError, ValueError): - hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") - - -class HubDatasetTest(TypedDict): - name: str - splits_response: Any - - -HubDatasets = Mapping[str, HubDatasetTest] - - -def create_splits_response(dataset: str, num_bytes: float = None, num_examples: int = None): - dataset, config, split = get_default_config_split(dataset) - return { - "splits": [ - { - "dataset": dataset, - "config": config, - "split": split, - "num_bytes": num_bytes, - "num_examples": num_examples, - } - ] - } - - -DATA_cols = { - "col_1": {"_type": "Value", "dtype": "int64"}, - "col_2": {"_type": "Value", "dtype": "int64"}, - "col_3": {"_type": "Value", "dtype": "float64"}, -} -DATA_rows = [ - {"col_1": 0, "col_2": 0, "col_3": 0.0}, - {"col_1": 1, "col_2": 1, "col_3": 1.0}, - {"col_1": 2, "col_2": 2, "col_3": 2.0}, - {"col_1": 3, "col_2": 3, "col_3": 3.0}, -] - - -AUDIO_cols = { - "col": { - "_type": "Audio", - "sampling_rate": 16_000, - }, -} - - -def get_AUDIO_rows(dataset: str): - dataset, config, split = get_default_config_split(dataset) - return [ - { - "col": [ - { - "src": f"http://localhost/assets/{dataset}/--/{config}/{split}/0/col/audio.mp3", - "type": "audio/mpeg", - }, - { - "src": f"http://localhost/assets/{dataset}/--/{config}/{split}/0/col/audio.wav", - "type": "audio/wav", - }, - ] - } - ] - - [email protected](scope="session", autouse=True) -def hub_datasets( - hub_public_empty, - hub_public_csv, - hub_private_csv, - hub_gated_csv, - hub_public_audio, -) -> HubDatasets: - return { - "does_not_exist": { - "name": "does_not_exist", - "splits_response": None, - }, - "empty": { - "name": hub_public_empty, - "splits_response": None, - }, - "public": { - "name": hub_public_csv, - "splits_response": create_splits_response(hub_public_csv, None, None), - }, - "private": { - "name": hub_private_csv, - "splits_response": create_splits_response(hub_private_csv, None, None), - }, - "gated": { - "name": hub_gated_csv, - "splits_response": create_splits_response(hub_gated_csv, None, None), - }, - "audio": { - "name": hub_public_audio, - "splits_response": create_splits_response(hub_public_audio, 54.0, 1), - }, - }
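All of the deleted `hub.py` fixtures above share one lifecycle: create a uniquely named repo on the CI Hub, yield its id to the tests, and delete it on teardown, swallowing errors if the repo is already gone. A minimal sketch of that pattern, reusing only calls that appear in the deleted code (the `user/` namespace below is a placeholder, not the CI user):

```python
import time
from contextlib import suppress
from typing import Iterator

import pytest
import requests
from huggingface_hub.hf_api import HfApi


@pytest.fixture(scope="session")
def hub_public_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterator[str]:
    # unique suffix so parallel CI runs cannot collide on the repo name
    repo_id = f"user/csv-{int(time.time() * 10e3)}"
    hf_api.create_repo(repo_id=repo_id, token=hf_token, repo_type="dataset")
    hf_api.upload_file(
        token=hf_token,
        path_or_fileobj=csv_path,
        path_in_repo="dataset.csv",
        repo_id=repo_id,
        repo_type="dataset",
    )
    yield repo_id
    # teardown: best-effort deletion, as in the deleted fixtures
    with suppress(requests.exceptions.HTTPError, ValueError):
        hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset")
```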
4ab24c545799e30c29faa3c89eb348d9e3060e5c
Sylvain Lesage
2022-11-29T14:53:12
Simplify docker (#654)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index 1196d3b6..85b34cd1 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -60 +60 @@ jobs: - QUEUE_SLEEP_TIME: "1" + WORKER_SLEEP_TIME: "1" @@ -72 +72 @@ jobs: - run: docker-compose -f ./docker-compose-datasets-server-from-remote-images.yml up -d + run: docker compose -f ./docker-compose-datasets-server.yml up -d diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 08f440e8..75b8d647 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -16 +16 @@ on: - - 'tools/docker-compose-datasets-server-from-remote-images.yml' + - 'tools/docker-compose-datasets-server.yml' diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 241b40bb..e9c0b582 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -17,8 +17 @@ Install docker (see https://docs.docker.com/engine/install/ubuntu/#install-using -make install -make start-from-local-code -``` - -To use the docker images already compiled using the CI: - -``` -make start-from-remote-images +make start @@ -27 +20 @@ make start-from-remote-images -To install a single library (in [libs](./libs)) or service (in [services](./services)), install Python 3.9 (consider [pyenv](https://github.com/pyenv/pyenv)) and [poetry](https://python-poetry.org/docs/master/#installation) (don't forget to add `poetry` to the `PATH` environment variable). +To install a single job (in [jobs](./jobs)), library (in [libs](./libs)), service (in [services](./services)) or worker (in [workers](./workers)), go to their respective directory, and install Python 3.9 (consider [pyenv](https://github.com/pyenv/pyenv)) and [poetry](https://python-poetry.org/docs/master/#installation) (don't forget to add `poetry` to the `PATH` environment variable). @@ -50 +43,6 @@ If you use VSCode, it might be useful to use the ["monorepo" workspace](./.vscod -The repository is structured as a monorepo, with Python applications in [services/](./services/) and [workers/](./workers/), and Python libraries in [libs/](./libs/). +The repository is structured as a monorepo, with Python libraries and applications in [jobs](./jobs), [libs](./libs), [services](./services) and [workers](./workers): + +- [jobs](./jobs) contains the one-time jobs run by Helm before deploying the pods. For now, the only job migrates the databases when needed. +- [libs](./libs) contains the Python libraries used by the services and workers. For now, the only library is [libcommon](./libs/libcommon), which contains the common code for the services and workers. +- [services](./services) contains the applications: the public API, the admin API (which is separated from the public API and might be published under its own domain at some point) and the reverse proxy. +- [workers](./workers) contains the workers that process the queue asynchronously: they get a "job" (caution: not the Helm jobs, but the jobs stored in the queue), process the expected response for the associated endpoint, and store the response in the cache. @@ -92 +90 @@ The following environments contain all the modules: reverse proxy, API server, a -The CI checks the quality of the code through a [GitHub action](./.github/workflows/quality.yml). To manually format the code of a library or a service: +The CI checks the quality of the code through a [GitHub action](./.github/workflows/quality.yml). 
To manually format the code of a job, library, service or worker: @@ -106 +104 @@ make quality -The CI checks the tests a [GitHub action](./.github/workflows/unit-tests.yml). To manually test a library or a service: +The CI checks the tests through a [GitHub action](./.github/workflows/unit-tests.yml). To manually test a job, library, service or worker: diff --git a/Makefile b/Makefile index b4a64a3b..11efd315 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ -# environment variables for the commands (docker-compose, poetry) -export LOCAL_CODE_MONGO_PORT := 27060 -export LOCAL_CODE_PORT_ADMIN := 8081 -export LOCAL_CODE_PORT_API := 8080 -export LOCAL_CODE_PORT_REVERSE_PROXY := 8000 -export LOCAL_CODE_COMPOSE_PROJECT_NAME := local-code +# environment variables for the commands (docker compose, poetry) +export MONGO_PORT := 27060 +export PORT_ADMIN := 8181 +export PORT_API := 8180 +export PORT_REVERSE_PROXY := 8100 +export COMPOSE_PROJECT_NAME := datasets-server @@ -8,5 +7,0 @@ export LOCAL_CODE_COMPOSE_PROJECT_NAME := local-code -export REMOTE_IMAGES_MONGO_PORT := 27061 -export REMOTE_IMAGES_PORT_ADMIN := 8181 -export REMOTE_IMAGES_PORT_API := 8180 -export REMOTE_IMAGES_PORT_REVERSE_PROXY := 8100 -export REMOTE_IMAGES_COMPOSE_PROJECT_NAME := remote-images @@ -14,2 +9 @@ export REMOTE_IMAGES_COMPOSE_PROJECT_NAME := remote-images -LOCAL_CODE_DOCKER_COMPOSE := ./tools/docker-compose-datasets-server-from-local-code.yml -REMOTE_IMAGES_DOCKER_COMPOSE := ./tools/docker-compose-datasets-server-from-remote-images.yml +DOCKER_COMPOSE := ./tools/docker-compose-datasets-server.yml @@ -21,8 +15,3 @@ include tools/Docker.mk -.PHONY: install -install: - $(MAKE) -C e2e/ install - $(MAKE) -C services/api/ install - $(MAKE) -C services/admin/ install - $(MAKE) -C libs/libcommon/ install - $(MAKE) -C workers/first_rows install - $(MAKE) -C workers/splits install +.PHONY: start +start: + MONGO_PORT=${MONGO_PORT} ADMIN_UVICORN_PORT=${PORT_ADMIN} API_UVICORN_PORT=${PORT_API} PORT_REVERSE_PROXY=${PORT_REVERSE_PROXY} DOCKER_COMPOSE=${DOCKER_COMPOSE} $(MAKE) up @@ -30,55 +19,3 @@ install: -.PHONY: start-from-local-code -start-from-local-code: - MONGO_PORT=${LOCAL_CODE_MONGO_PORT} ADMIN_UVICORN_PORT=${LOCAL_CODE_PORT_ADMIN} API_UVICORN_PORT=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) down - MONGO_PORT=${LOCAL_CODE_MONGO_PORT} ADMIN_UVICORN_PORT=${LOCAL_CODE_PORT_ADMIN} API_UVICORN_PORT=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) up - -.PHONY: stop-from-local-code -stop-from-local-code: - MONGO_PORT=${LOCAL_CODE_MONGO_PORT} ADMIN_UVICORN_PORT=${LOCAL_CODE_PORT_ADMIN} API_UVICORN_PORT=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) down - -.PHONY: start-from-remote-images -start-from-remote-images: - MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} ADMIN_UVICORN_PORT=${REMOTE_IMAGES_PORT_ADMIN} API_UVICORN_PORT=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) down - MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} ADMIN_UVICORN_PORT=${REMOTE_IMAGES_PORT_ADMIN} 
API_UVICORN_PORT=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) up - -.PHONY: stop-from-remote-images -stop-from-remote-images: - MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} ADMIN_UVICORN_PORT=${REMOTE_IMAGES_PORT_ADMIN} API_UVICORN_PORT=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) down - -.PHONY: test -test: - $(MAKE) -C services/admin/ test - $(MAKE) -C services/api/ test - $(MAKE) -C libs/libcommon/ test - $(MAKE) -C workers/first_rows test - $(MAKE) -C workers/splits test - -.PHONY: coverage -coverage: - $(MAKE) -C services/admin/ coverage - $(MAKE) -C services/api/ coverage - $(MAKE) -C libs/libcommon/ coverage - $(MAKE) -C workers/first_rows coverage - $(MAKE) -C workers/splits coverage - -# Check that source code meets quality standards + security -.PHONY: quality -quality: - $(MAKE) -C e2e/ quality - $(MAKE) -C e2e/ openapi - $(MAKE) -C chart/ quality - $(MAKE) -C services/api/ quality - $(MAKE) -C services/admin/ quality - $(MAKE) -C libs/libcommon/ quality - $(MAKE) -C workers/first_rows quality - $(MAKE) -C workers/splits quality - -# Format source code automatically -.PHONY: style -style: - $(MAKE) -C e2e/ style - $(MAKE) -C services/api/ style - $(MAKE) -C services/admin/ style - $(MAKE) -C libs/libcommon/ style - $(MAKE) -C workers/first_rows style - $(MAKE) -C workers/splits style +.PHONY: stop +stop: + MONGO_PORT=${MONGO_PORT} ADMIN_UVICORN_PORT=${PORT_ADMIN} API_UVICORN_PORT=${PORT_API} PORT_REVERSE_PROXY=${PORT_REVERSE_PROXY} DOCKER_COMPOSE=${DOCKER_COMPOSE} $(MAKE) down @@ -88,0 +26,2 @@ e2e: + +# for install, quality checks and tests of every job, lib, service or worker, see the Makefile in the corresponding folder diff --git a/chart/templates/_envCommon.tpl b/chart/templates/_envCommon.tpl index 65c2e7f4..19c75f19 100644 --- a/chart/templates/_envCommon.tpl +++ b/chart/templates/_envCommon.tpl @@ -8,0 +9,2 @@ +- name: HF_ENDPOINT # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 + value: {{ .Values.common.hfEndpoint | quote }} diff --git a/chart/templates/_envDatasetsWorker.tpl b/chart/templates/_envDatasetsWorker.tpl new file mode 100644 index 00000000..89c89974 --- /dev/null +++ b/chart/templates/_envDatasetsWorker.tpl @@ -0,0 +1,13 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "envDatasetsWorker" -}} +- name: HF_DATASETS_CACHE + value: {{ .Values.hfDatasetsCache | quote }} +- name: HF_MODULES_CACHE + value: "/tmp/modules-cache" + # the size should remain so small that we don't need to worry about putting it on an external storage + # see https://github.com/huggingface/datasets-server/issues/248 +- name: NUMBA_CACHE_DIR + value: {{ .Values.numbaCacheDirectory | quote }} +{{- end -}} diff --git a/chart/templates/_envWorker.tpl b/chart/templates/_envWorker.tpl new file mode 100644 index 00000000..a6045b96 --- /dev/null +++ b/chart/templates/_envWorker.tpl @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +{{- define "envWorker" -}} +- name: WORKER_MAX_LOAD_PCT + value: {{ .Values.worker.maxLoadPct | quote }} +- name: WORKER_MAX_MEMORY_PCT + value: {{ .Values.worker.maxMemoryPct | quote }} +- name: WORKER_WORKER_SLEEP_SECONDS + value: {{ .Values.worker.sleepSeconds | quote }} +{{- end -}} diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl index cd23cbf0..2c08de05 100644 --- a/chart/templates/services/admin/_container.tpl +++ b/chart/templates/services/admin/_container.tpl @@ -11,0 +12 @@ + # service @@ -19,0 +21,4 @@ + # prometheus + - name: PROMETHEUS_MULTIPROC_DIR + value: {{ .Values.admin.prometheusMultiprocDirectory | quote }} + # uvicorn @@ -26,2 +30,0 @@ - - name: PROMETHEUS_MULTIPROC_DIR - value: {{ .Values.admin.prometheusMultiprocDirectory | quote }} diff --git a/chart/templates/services/api/_container.tpl b/chart/templates/services/api/_container.tpl index 2293aad3..d7d3c713 100644 --- a/chart/templates/services/api/_container.tpl +++ b/chart/templates/services/api/_container.tpl @@ -11,0 +12 @@ + # service @@ -17,0 +19,4 @@ + # prometheus + - name: PROMETHEUS_MULTIPROC_DIR + value: {{ .Values.api.prometheusMultiprocDirectory | quote }} + # uvicorn @@ -24,2 +28,0 @@ - - name: PROMETHEUS_MULTIPROC_DIR - value: {{ .Values.api.prometheusMultiprocDirectory | quote }} diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index 8ba4d800..c6596d45 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -12,2 +12,2 @@ - - name: HF_ENDPOINT # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 - value: {{ .Values.common.hfEndpoint | quote }} + {{ include "envWorker" . | nindent 2 }} + {{ include "envDatasetsWorker" . | nindent 2 }} @@ -18,14 +17,0 @@ - - name: QUEUE_MAX_LOAD_PCT - value: {{ .Values.queue.maxLoadPct | quote }} - - name: QUEUE_MAX_MEMORY_PCT - value: {{ .Values.queue.maxMemoryPct | quote }} - - name: QUEUE_WORKER_SLEEP_SECONDS - value: {{ .Values.queue.sleepSeconds | quote }} - - name: HF_DATASETS_CACHE - value: {{ .Values.hfDatasetsCache | quote }} - - name: HF_MODULES_CACHE - value: "/tmp/modules-cache" - # the size should remain so small that we don't need to worry about putting it on an external storage - # see https://github.com/huggingface/datasets-server/issues/248 - - name: NUMBA_CACHE_DIR - value: {{ .Values.numbaCacheDirectory | quote }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index f9d155d0..729932e6 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -12,2 +12,2 @@ - - name: HF_ENDPOINT # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 - value: {{ .Values.common.hfEndpoint | quote }} + {{ include "envWorker" . | nindent 2 }} + {{ include "envDatasetsWorker" . 
| nindent 2 }} @@ -18,14 +17,0 @@ - - name: QUEUE_MAX_LOAD_PCT - value: {{ .Values.queue.maxLoadPct | quote }} - - name: QUEUE_MAX_MEMORY_PCT - value: {{ .Values.queue.maxMemoryPct | quote }} - - name: QUEUE_WORKER_SLEEP_SECONDS - value: {{ .Values.queue.sleepSeconds | quote }} - - name: HF_DATASETS_CACHE - value: {{ .Values.hfDatasetsCache | quote }} - - name: HF_MODULES_CACHE - value: "/tmp/modules-cache" - # the size should remain so small that we don't need to worry about putting it on an external storage - # see https://github.com/huggingface/datasets-server/issues/248 - - name: NUMBA_CACHE_DIR - value: {{ .Values.numbaCacheDirectory | quote }} diff --git a/chart/values.yaml b/chart/values.yaml index f517e80f..692b727f 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -56,0 +57,4 @@ queue: + # Name of the mongo db database used to store the jobs queue + mongoDatabase: "datasets_server_queue" + +worker: @@ -61,2 +64,0 @@ queue: - # Name of the mongo db database used to store the jobs queue - mongoDatabase: "datasets_server_queue" diff --git a/e2e/Makefile b/e2e/Makefile index 905735b2..3b312b8d 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -1 +1 @@ -# environment variables for the commands (docker-compose, poetry) +# environment variables for the commands (docker compose, poetry) @@ -16 +16 @@ export API_UVICORN_NUM_WORKERS := 2 -DOCKER_COMPOSE := ../tools/docker-compose-datasets-server-from-remote-images.yml +DOCKER_COMPOSE := ../tools/docker-compose-datasets-server.yml diff --git a/jobs/mongodb_migration/Makefile b/jobs/mongodb_migration/Makefile index 5ad88b8b..dbd4c0c6 100644 --- a/jobs/mongodb_migration/Makefile +++ b/jobs/mongodb_migration/Makefile @@ -1 +1 @@ -# environment variables for the commands (docker-compose, poetry) +# environment variables for the commands (docker compose, poetry) diff --git a/libs/libcommon/Makefile b/libs/libcommon/Makefile index b2e17e4d..57583a9c 100644 --- a/libs/libcommon/Makefile +++ b/libs/libcommon/Makefile @@ -1 +1 @@ -# environment variables for the commands (docker-compose, poetry) +# environment variables for the commands (docker compose, poetry) diff --git a/services/admin/Makefile b/services/admin/Makefile index 342d0e17..5f06b08a 100644 --- a/services/admin/Makefile +++ b/services/admin/Makefile @@ -1 +1 @@ -# environment variables for the commands (docker-compose, poetry) +# environment variables for the commands (docker compose, poetry) diff --git a/services/api/Makefile b/services/api/Makefile index a473b9f1..f7c14367 100644 --- a/services/api/Makefile +++ b/services/api/Makefile @@ -1 +1 @@ -# environment variables for the commands (docker-compose, poetry) +# environment variables for the commands (docker compose, poetry) diff --git a/services/api/README.md b/services/api/README.md index d492f2cd..5543a2a3 100644 --- a/services/api/README.md +++ b/services/api/README.md @@ -7 +7 @@ -The worker con be configured using environment variables. They are grouped by scope. +The worker can be configured using environment variables. They are grouped by scope. diff --git a/services/reverse-proxy/README.md b/services/reverse-proxy/README.md index e6de8256..2c0a7355 100644 --- a/services/reverse-proxy/README.md +++ b/services/reverse-proxy/README.md @@ -5 +5 @@ -See [docker-compose.yml](../../docker-compose.yml) for usage. +See [docker-compose-datasets-server.yml](../../tools/docker-compose-datasets-server.yml) for usage. 
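The new `envWorker` template and the `worker` block in `chart/values.yaml` above rename the `QUEUE_MAX_LOAD_PCT` / `QUEUE_MAX_MEMORY_PCT` / `QUEUE_WORKER_SLEEP_SECONDS` trio to `WORKER_*` variables. On the Python side they would be read with the `environs` pattern the repo's config classes use elsewhere; this is an illustrative sketch, not the repo's actual class (the class and field names are assumptions):

```python
from dataclasses import dataclass

from environs import Env

WORKER_MAX_LOAD_PCT = 70
WORKER_MAX_MEMORY_PCT = 80
WORKER_SLEEP_SECONDS = 15


@dataclass
class WorkerConfig:
    max_load_pct: int = WORKER_MAX_LOAD_PCT
    max_memory_pct: int = WORKER_MAX_MEMORY_PCT
    sleep_seconds: int = WORKER_SLEEP_SECONDS

    @staticmethod
    def from_env() -> "WorkerConfig":
        env = Env(expand_vars=True)
        # the prefixed names match the variables set in docker-compose-base.yml
        # (WORKER_MAX_LOAD_PCT, WORKER_MAX_MEMORY_PCT, WORKER_SLEEP_SECONDS)
        with env.prefixed("WORKER_"):
            return WorkerConfig(
                max_load_pct=env.int(name="MAX_LOAD_PCT", default=WORKER_MAX_LOAD_PCT),
                max_memory_pct=env.int(name="MAX_MEMORY_PCT", default=WORKER_MAX_MEMORY_PCT),
                sleep_seconds=env.int(name="SLEEP_SECONDS", default=WORKER_SLEEP_SECONDS),
            )
```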
diff --git a/tools/Docker.mk b/tools/Docker.mk index 27545dd7..ae004196 100644 --- a/tools/Docker.mk +++ b/tools/Docker.mk @@ -3 +3 @@ down: - docker-compose -f $(DOCKER_COMPOSE) down -v --remove-orphans + docker compose -f $(DOCKER_COMPOSE) down --remove-orphans --volumes @@ -7 +7 @@ up: - docker-compose -f $(DOCKER_COMPOSE) up -d + docker compose -f $(DOCKER_COMPOSE) up -d --force-recreate --remove-orphans --renew-anon-volumes diff --git a/tools/PythonTest.mk b/tools/PythonTest.mk index 85c8d92e..c6ebbb17 100644 --- a/tools/PythonTest.mk +++ b/tools/PythonTest.mk @@ -3 +2,0 @@ test: - $(MAKE) down @@ -10 +8,0 @@ coverage: - $(MAKE) down diff --git a/tools/docker-compose-base.yml b/tools/docker-compose-base.yml new file mode 100644 index 00000000..6388202c --- /dev/null +++ b/tools/docker-compose-base.yml @@ -0,0 +1,31 @@ +version: "3.9" +services: + common: + environment: + # common + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + # huggingface_hub + HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 + # cache + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + # queue + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + # worker + WORKER_MAX_LOAD_PCT: ${WORKER_MAX_LOAD_PCT-70} + WORKER_MAX_MEMORY_PCT: ${WORKER_MAX_MEMORY_PCT-80} + WORKER_SLEEP_SECONDS: ${WORKER_SLEEP_SECONDS-15} + datasets-worker: + extends: + service: common + environment: + # datasets + HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} + HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} + NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} diff --git a/tools/docker-compose-datasets-server-from-local-code.yml b/tools/docker-compose-datasets-server-from-local-code.yml deleted file mode 100644 index eab90c44..00000000 --- a/tools/docker-compose-datasets-server-from-local-code.yml +++ /dev/null @@ -1,163 +0,0 @@ -version: "3.9" -services: - reverse-proxy: - image: ${IMAGE_REVERSE_PROXY?IMAGE_REVERSE_PROXY env var must be provided} - volumes: - - ../chart/nginx-templates/:/etc/nginx/templates:ro - - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro - - ../chart/static-files/openapi.json:/static-files/openapi.json:ro - ports: - - "${PORT_REVERSE_PROXY-8000}:80" - environment: - ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - HOST: localhost - PORT: 80 - URL_ADMIN: http://admin:${ADMIN_UVICORN_PORT-8081} - URL_API: http://api:${API_UVICORN_PORT-8080} - depends_on: - - api - - admin - admin: - build: - context: .. 
- dockerfile: services/admin/Dockerfile - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - ADMIN_HF_ORGANIZATION: ${ADMIN_HF_ORGANIZATION-} - ADMIN_CACHE_REPORTS_NUM_RESULTS: ${ADMIN_CACHE_REPORTS_NUM_RESULTS-100} - ADMIN_HF_WHOAMI_PATH: ${ADMIN_HF_WHOAMI_PATH-/api/whoami-v2} - ADMIN_MAX_AGE: ${ADMIN_MAX_AGE-10} - PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} - ADMIN_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose - ADMIN_UVICORN_NUM_WORKERS: ${ADMIN_UVICORN_NUM_WORKERS-2} - ADMIN_UVICORN_PORT: ${ADMIN_UVICORN_PORT-8081} - depends_on: - - mongodb - restart: always - ports: - # for debug - - ${ADMIN_UVICORN_PORT-8081}:${ADMIN_UVICORN_PORT-8081} - api: - build: - context: .. - dockerfile: services/api/Dockerfile - volumes: - - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - API_HF_AUTH_PATH: ${API_HF_AUTH_PATH-/api/datasets/%s/auth-check} - API_MAX_AGE_LONG: ${API_MAX_AGE_LONG-120} - API_MAX_AGE_SHORT: ${API_MAX_AGE_SHORT-10} - PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} - API_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose - API_UVICORN_NUM_WORKERS: ${API_UVICORN_NUM_WORKERS-2} - API_UVICORN_PORT: ${API_UVICORN_PORT-8080} - ports: - # for debug - - ${API_UVICORN_PORT-8080}:${API_UVICORN_PORT-8080} - depends_on: - - mongodb - restart: unless-stopped - worker-splits: - build: - context: .. 
- dockerfile: workers/splits/Dockerfile - volumes: - - splits-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw - - splits-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw - - splits-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} - HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} - NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} - depends_on: - - mongodb - restart: always - worker-first-rows: - build: - context: .. - dockerfile: workers/first-rows/Dockerfile - volumes: - - first-rows-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw - - first-rows-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw - - first-rows-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} - HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} - NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} - FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE: ${FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE-100_000_000} - FIRST_ROWS_MAX_BYTES: ${FIRST_ROWS_MAX_BYTES-1_000_000} - FIRST_ROWS_MAX_NUMBER: ${FIRST_ROWS_MAX_NUMBER-100} - FIRST_ROWS_MIN_CELL_BYTES: ${FIRST_ROWS_MIN_CELL_BYTES-100} - FIRST_ROWS_MIN_NUMBER: ${FIRST_ROWS_MIN_NUMBER-10} - depends_on: - - mongodb - restart: always - mongodb: - image: mongo - volumes: - - mongo:/data/db:rw - ports: - # for debug - - "${MONGO_PORT-27017}:27017" -volumes: - assets: - mongo: - splits-datasets-cache: - splits-modules-cache: - splits-numba-cache: - 
first-rows-datasets-cache: - first-rows-modules-cache: - first-rows-numba-cache: diff --git a/tools/docker-compose-datasets-server-from-remote-images.yml b/tools/docker-compose-datasets-server-from-remote-images.yml deleted file mode 100644 index 70728814..00000000 --- a/tools/docker-compose-datasets-server-from-remote-images.yml +++ /dev/null @@ -1,155 +0,0 @@ -version: "3.9" -services: - reverse-proxy: - image: ${IMAGE_REVERSE_PROXY?IMAGE_REVERSE_PROXY env var must be provided} - volumes: - - ../chart/nginx-templates/:/etc/nginx/templates:ro - - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro - - ../chart/static-files/openapi.json:/static-files/openapi.json:ro - ports: - - "${PORT_REVERSE_PROXY-8000}:80" - environment: - ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - HOST: localhost - PORT: 80 - URL_ADMIN: http://admin:${ADMIN_UVICORN_PORT-8081} - URL_API: http://api:${API_UVICORN_PORT-8080} - depends_on: - - api - - admin - admin: - image: ${IMAGE_SERVICE_ADMIN?IMAGE_SERVICE_ADMIN env var must be provided} - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - ADMIN_HF_ORGANIZATION: ${ADMIN_HF_ORGANIZATION-} - ADMIN_CACHE_REPORTS_NUM_RESULTS: ${ADMIN_CACHE_REPORTS_NUM_RESULTS-100} - ADMIN_HF_WHOAMI_PATH: ${ADMIN_HF_WHOAMI_PATH-/api/whoami-v2} - ADMIN_MAX_AGE: ${ADMIN_MAX_AGE-10} - PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} - ADMIN_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose - ADMIN_UVICORN_NUM_WORKERS: ${ADMIN_UVICORN_NUM_WORKERS-2} - ADMIN_UVICORN_PORT: ${ADMIN_UVICORN_PORT-8081} - depends_on: - - mongodb - restart: always - ports: - # for debug - - ${ADMIN_UVICORN_PORT-8081}:${ADMIN_UVICORN_PORT-8081} - api: - image: ${IMAGE_SERVICE_API?IMAGE_SERVICE_API env var must be provided} - volumes: - - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - API_HF_AUTH_PATH: 
${API_HF_AUTH_PATH-/api/datasets/%s/auth-check} - API_MAX_AGE_LONG: ${API_MAX_AGE_LONG-120} - API_MAX_AGE_SHORT: ${API_MAX_AGE_SHORT-10} - PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} - API_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose - API_UVICORN_NUM_WORKERS: ${API_UVICORN_NUM_WORKERS-2} - API_UVICORN_PORT: ${API_UVICORN_PORT-8080} - ports: - # for debug - - ${API_UVICORN_PORT-8080}:${API_UVICORN_PORT-8080} - depends_on: - - mongodb - restart: unless-stopped - worker-splits: - image: ${IMAGE_WORKER_SPLITS?IMAGE_WORKER_SPLITS env var must be provided} - volumes: - - splits-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw - - splits-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw - - splits-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} - HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} - NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} - depends_on: - - mongodb - restart: always - worker-first-rows: - image: ${IMAGE_WORKER_FIRST_ROWS?IMAGE_WORKER_FIRST_ROWS env var must be provided} - volumes: - - first-rows-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw - - first-rows-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw - - first-rows-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw - environment: - CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} - CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default - CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} - QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} - QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} - QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} - QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default - QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} - QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} - COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy - COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} - HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 - COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} - COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} - HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} - HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} - NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} - FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE: 
${FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE-100_000_000} - FIRST_ROWS_MAX_BYTES: ${FIRST_ROWS_MAX_BYTES-1_000_000} - FIRST_ROWS_MAX_NUMBER: ${FIRST_ROWS_MAX_NUMBER-100} - FIRST_ROWS_MIN_CELL_BYTES: ${FIRST_ROWS_MIN_CELL_BYTES-100} - FIRST_ROWS_MIN_NUMBER: ${FIRST_ROWS_MIN_NUMBER-10} - depends_on: - - mongodb - restart: always - mongodb: - image: mongo - volumes: - - mongo:/data/db:rw - ports: - # for debug - - "${MONGO_PORT-27017}:27017" -volumes: - assets: - mongo: - splits-datasets-cache: - splits-modules-cache: - splits-numba-cache: - first-rows-datasets-cache: - first-rows-modules-cache: - first-rows-numba-cache: diff --git a/tools/docker-compose-datasets-server.yml b/tools/docker-compose-datasets-server.yml new file mode 100644 index 00000000..8cbefbea --- /dev/null +++ b/tools/docker-compose-datasets-server.yml @@ -0,0 +1,129 @@ +version: "3.9" +services: + reverse-proxy: + image: ${IMAGE_REVERSE_PROXY?IMAGE_REVERSE_PROXY env var must be provided} + volumes: + - ../chart/nginx-templates/:/etc/nginx/templates:ro + - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro + - ../chart/static-files/openapi.json:/static-files/openapi.json:ro + ports: + - "${PORT_REVERSE_PROXY-8000}:80" + environment: + ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + HOST: localhost + PORT: 80 + URL_ADMIN: http://admin:${ADMIN_UVICORN_PORT-8081} + URL_API: http://api:${API_UVICORN_PORT-8080} + depends_on: + - api + - admin + admin: + # build: + # context: .. + # dockerfile: services/admin/Dockerfile + image: ${IMAGE_SERVICE_ADMIN?IMAGE_SERVICE_ADMIN env var must be provided} + extends: + file: docker-compose-base.yml + service: common + environment: + # service + ADMIN_HF_ORGANIZATION: ${ADMIN_HF_ORGANIZATION-} + ADMIN_CACHE_REPORTS_NUM_RESULTS: ${ADMIN_CACHE_REPORTS_NUM_RESULTS-100} + ADMIN_HF_WHOAMI_PATH: ${ADMIN_HF_WHOAMI_PATH-/api/whoami-v2} + ADMIN_MAX_AGE: ${ADMIN_MAX_AGE-10} + # prometheus + PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} + # uvicorn + ADMIN_UVICORN_HOSTNAME: 0.0.0.0 # required for docker compose + ADMIN_UVICORN_NUM_WORKERS: ${ADMIN_UVICORN_NUM_WORKERS-2} + ADMIN_UVICORN_PORT: ${ADMIN_UVICORN_PORT-8081} + depends_on: + - mongodb + restart: always + ports: + # for debug + - ${ADMIN_UVICORN_PORT-8081}:${ADMIN_UVICORN_PORT-8081} + api: + # build: + # context: .. + # dockerfile: services/api/Dockerfile + image: ${IMAGE_SERVICE_API?IMAGE_SERVICE_API env var must be provided} + volumes: + - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro + extends: + file: docker-compose-base.yml + service: common + environment: + # service + API_HF_AUTH_PATH: ${API_HF_AUTH_PATH-/api/datasets/%s/auth-check} + API_MAX_AGE_LONG: ${API_MAX_AGE_LONG-120} + API_MAX_AGE_SHORT: ${API_MAX_AGE_SHORT-10} + # prometheus + PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} + # uvicorn + API_UVICORN_HOSTNAME: 0.0.0.0 # required for docker compose + API_UVICORN_NUM_WORKERS: ${API_UVICORN_NUM_WORKERS-2} + API_UVICORN_PORT: ${API_UVICORN_PORT-8080} + ports: + # for debug + - ${API_UVICORN_PORT-8080}:${API_UVICORN_PORT-8080} + depends_on: + - mongodb + restart: unless-stopped + worker-splits: + # build: + # context: .. 
+ # dockerfile: workers/splits/Dockerfile + image: ${IMAGE_WORKER_SPLITS?IMAGE_WORKER_SPLITS env var must be provided} + volumes: + - splits-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - splits-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw + - splits-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw + extends: + file: docker-compose-base.yml + service: datasets-worker + environment: + HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 + HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} + HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} + NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} + depends_on: + - mongodb + restart: always + worker-first-rows: + # build: + # context: .. + # dockerfile: workers/first-rows/Dockerfile + image: ${IMAGE_WORKER_FIRST_ROWS?IMAGE_WORKER_FIRST_ROWS env var must be provided} + volumes: + - first-rows-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - first-rows-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw + - first-rows-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw + extends: + file: docker-compose-base.yml + service: datasets-worker + environment: + FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE: ${FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE-100_000_000} + FIRST_ROWS_MAX_BYTES: ${FIRST_ROWS_MAX_BYTES-1_000_000} + FIRST_ROWS_MAX_NUMBER: ${FIRST_ROWS_MAX_NUMBER-100} + FIRST_ROWS_MIN_CELL_BYTES: ${FIRST_ROWS_MIN_CELL_BYTES-100} + FIRST_ROWS_MIN_NUMBER: ${FIRST_ROWS_MIN_NUMBER-10} + depends_on: + - mongodb + restart: always + mongodb: + image: mongo + volumes: + - mongo:/data/db:rw + ports: + # for debug + - "${MONGO_PORT-27017}:27017" +volumes: + assets: + mongo: + splits-datasets-cache: + splits-modules-cache: + splits-numba-cache: + first-rows-datasets-cache: + first-rows-modules-cache: + first-rows-numba-cache: diff --git a/workers/first_rows/Makefile b/workers/first_rows/Makefile index a27aaf5f..d4914da4 100644 --- a/workers/first_rows/Makefile +++ b/workers/first_rows/Makefile @@ -1 +1 @@ -# environment variables for the commands (docker-compose, poetry) +# environment variables for the commands (docker compose, poetry) diff --git a/workers/splits/Makefile b/workers/splits/Makefile index 7fc2004b..7bb6d766 100644 --- a/workers/splits/Makefile +++ b/workers/splits/Makefile @@ -1 +1 @@ -# environment variables for the commands (docker-compose, poetry) +# environment variables for the commands (docker compose, poetry)
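The `worker` section of `docker-compose-base.yml` introduced in this commit gives every worker the same three knobs: stop taking jobs when CPU load or memory crosses a threshold, and sleep between polls. A sketch of the kind of loop those knobs imply — not the repo's actual loop — using `psutil` for the memory figure (an assumption) and a stubbed `process_next_job` standing in for the real queue logic:

```python
import os
import time

import psutil


def process_next_job() -> bool:
    """Stub: pop a job from the queue and process it; return False if the queue is empty."""
    return False


def has_resources(max_load_pct: int, max_memory_pct: int) -> bool:
    # 1-minute load average normalized by CPU count, expressed as a percentage
    load_pct = os.getloadavg()[0] / (os.cpu_count() or 1) * 100
    memory_pct = psutil.virtual_memory().percent
    return load_pct < max_load_pct and memory_pct < max_memory_pct


def loop(max_load_pct: int = 70, max_memory_pct: int = 80, sleep_seconds: int = 15) -> None:
    # default values mirror the docker-compose defaults: 70, 80 and 15
    while True:
        if has_resources(max_load_pct, max_memory_pct) and process_next_job():
            continue  # a job was processed: immediately look for the next one
        time.sleep(sleep_seconds)  # overloaded, or the queue is empty: back off
```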
b5b820a8d2845bdf7bdc4e11aadf2331b9b228f4
Sylvain Lesage
2022-11-29T11:48:06
feat: 🎸 cancel-jobs must be a POST request, not a GET (#653)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 5d92fc2b..2566e41d 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -8 +8 @@ - "admin": "huggingface/datasets-server-services-admin:sha-b9b950d", + "admin": "huggingface/datasets-server-services-admin:sha-298bbc9", diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 19797deb..7d2d948e 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -82,0 +83 @@ def create_app() -> Starlette: + methods=["POST"],
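The one-line fix above adds `methods=["POST"]` to a route registration in the admin app: a Starlette `Route` defaults to `["GET"]` (plus an implicit `HEAD`), so without it a POST would be answered with 405 Method Not Allowed. A standalone illustration — the path and endpoint body below are hypothetical, not the repo's code:

```python
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import JSONResponse
from starlette.routing import Route


async def cancel_jobs_endpoint(request: Request) -> JSONResponse:
    # hypothetical body; the real endpoint cancels the started jobs of a queue
    return JSONResponse({"status": "ok"})


app = Starlette(
    routes=[
        # without methods=..., this route would only accept GET (and HEAD),
        # so a POST would get a 405 Method Not Allowed
        Route("/cancel-jobs/splits", endpoint=cancel_jobs_endpoint, methods=["POST"]),
    ]
)
```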
1233c2f5949114e2043adedf2b22b4e7bde92d46
Sylvain Lesage
2022-11-29T11:44:26
Fix ask access (#652)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index d6467df7..5d92fc2b 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-7eb22ac", - "api": "huggingface/datasets-server-services-api:sha-6907835" + "admin": "huggingface/datasets-server-services-admin:sha-b9b950d", + "api": "huggingface/datasets-server-services-api:sha-b9b950d" diff --git a/libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl new file mode 100644 index 00000000..7e28167d Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.2.tar.gz b/libs/libcommon/dist/libcommon-0.5.2.tar.gz new file mode 100644 index 00000000..b5da8cc9 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.2.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 63f049c3..286b3001 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.5.1" +version = "0.5.2" @@ -35,0 +36 @@ requires = ["poetry-core>=1.0.0"] +# addopts = "-k 'wip'" @@ -37 +38,4 @@ filterwarnings = ["ignore::DeprecationWarning"] - +markers = [ + "real_dataset: tests on the Hub", + "wip: tests being developed" +] diff --git a/libs/libcommon/src/libcommon/dataset.py b/libs/libcommon/src/libcommon/dataset.py index 00d66d9c..67025103 100644 --- a/libs/libcommon/src/libcommon/dataset.py +++ b/libs/libcommon/src/libcommon/dataset.py @@ -8 +8,6 @@ import requests -from huggingface_hub.hf_api import DatasetInfo, HfApi, build_hf_headers +from huggingface_hub.hf_api import ( + DatasetInfo, + HfApi, + RepositoryNotFoundError, + build_hf_headers, +) @@ -154,3 +159,4 @@ def get_dataset_info_for_supported_datasets( - ask_access(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) - dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token) - # ^ should not raise, since it would have raised before when reaching ask_access + try: + dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token) + except RepositoryNotFoundError: + ask_access(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) diff --git a/libs/libcommon/tests/test_dataset.py b/libs/libcommon/tests/test_dataset.py new file mode 100644 index 00000000..2db05f67 --- /dev/null +++ b/libs/libcommon/tests/test_dataset.py @@ -0,0 +1,14 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import pytest + +from libcommon.dataset import check_support + + [email protected]_dataset +def test_check_support() -> None: + dataset = "glue" + hf_endpoint = "https://huggingface.co" + hf_token = None + check_support(dataset, hf_endpoint, hf_token) diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 2bb24282..5af58183 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -356 +356 @@ name = "libcommon" -version = "0.5.1" +version = "0.5.2" @@ -374 +374 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl" @@ -991 +991 @@ python-versions = "3.9.6" -content-hash = "d819cdf3b8a0c01bb42c4ffb7b524e25a4009709aaa82602ff83879f57e7ace0" +content-hash = "7da2ea2d7009addc9d940eda71eee51a1990f170dd82a19ed3a0ae8a2e348550" @@ -1170 +1170 @@ libcommon = [ - {file = "libcommon-0.5.1-py3-none-any.whl", hash = "sha256:a8d585735a14eb4a1df02fe9e2444627541c5cf7a8a10d9c79ca2d36cf90aff8"}, + {file = "libcommon-0.5.2-py3-none-any.whl", hash = "sha256:6df419dbbe249cb9572ead24a2534a43aa128ff1f43a9da9cfe480c751c6ba21"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 75475512..c38dce8a 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl", develop = false } @@ -37,0 +38 @@ markers = [ + "real_dataset: tests on the Hub", diff --git a/services/admin/tests/test_app_real.py b/services/admin/tests/test_app_real.py new file mode 100644 index 00000000..f69d66c9 --- /dev/null +++ b/services/admin/tests/test_app_real.py @@ -0,0 +1,54 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from libcommon.queue import _clean_queue_database +from libcommon.simple_cache import _clean_cache_database +from pytest import MonkeyPatch, fixture, mark +from starlette.testclient import TestClient + +from admin.app import create_app +from admin.config import AppConfig + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="module") +def real_monkeypatch(): + monkeypatch = MonkeyPatch() + monkeypatch.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + monkeypatch.setenv("COMMON_HF_ENDPOINT", "https://huggingface.co") + monkeypatch.setenv("COMMON_HF_TOKEN", "") + yield monkeypatch + monkeypatch.undo() + + +@fixture(scope="module") +def real_client(real_monkeypatch: MonkeyPatch) -> TestClient: + return TestClient(create_app()) + + +@fixture(scope="module") +def real_app_config(real_monkeypatch: MonkeyPatch) -> AppConfig: + app_config = AppConfig() + if "test" not in app_config.cache.mongo_database or "test" not in app_config.queue.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + if app_config.common.hf_endpoint != "https://huggingface.co": + raise ValueError("Test must be launched on the production hub") + return app_config + + +@fixture(autouse=True) +def real_clean_mongo_databases(real_app_config: AppConfig) -> None: + _clean_cache_database() + _clean_queue_database() + + [email protected]_dataset +def test_force_refresh( + real_app_config: AppConfig, + real_client: TestClient, +) -> None: + dataset = "glue" + path = next(iter(real_app_config.processing_graph.graph.steps.values())).endpoint + response = real_client.post(f"/force-refresh{path}?dataset={dataset}") + assert response.status_code == 200, response.text diff --git a/services/api/poetry.lock b/services/api/poetry.lock index d9d994c7..c9cde161 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -372 +372 @@ name = "libcommon" -version = "0.5.1" +version = "0.5.2" @@ -390 +390 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl" @@ -1033 +1033 @@ python-versions = "3.9.6" -content-hash = "169c56202dc047e7954fb71768fc1a06ad1b2e4cdbe14bb2e3e73c271ca9b83d" +content-hash = "a43bc3379c79e6a46a9e1c0e285dd5a1b856c40ed39fae242c72da24a5ce331e" @@ -1216 +1216 @@ libcommon = [ - {file = "libcommon-0.5.1-py3-none-any.whl", hash = "sha256:a8d585735a14eb4a1df02fe9e2444627541c5cf7a8a10d9c79ca2d36cf90aff8"}, + {file = "libcommon-0.5.2-py3-none-any.whl", hash = "sha256:6df419dbbe249cb9572ead24a2534a43aa128ff1f43a9da9cfe480c751c6ba21"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 0eea603f..aa66597c 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.2-py3-none-any.whl", develop = false } @@ -38,0 +39 @@ markers = [ + "real_dataset: tests on the Hub", diff --git a/services/api/tests/test_app_real.py b/services/api/tests/test_app_real.py new file mode 100644 index 00000000..26e31700 --- /dev/null +++ b/services/api/tests/test_app_real.py @@ -0,0 +1,55 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ + +from libcommon.queue import _clean_queue_database +from libcommon.simple_cache import _clean_cache_database +from pytest import MonkeyPatch, fixture, mark +from starlette.testclient import TestClient + +from api.app import create_app +from api.config import AppConfig + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="module") +def real_monkeypatch(): + monkeypatch = MonkeyPatch() + monkeypatch.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + monkeypatch.setenv("COMMON_HF_ENDPOINT", "https://huggingface.co") + monkeypatch.setenv("COMMON_HF_TOKEN", "") + yield monkeypatch + monkeypatch.undo() + + +@fixture(scope="module") +def real_client(real_monkeypatch: MonkeyPatch) -> TestClient: + return TestClient(create_app()) + + +@fixture(scope="module") +def real_app_config(real_monkeypatch: MonkeyPatch) -> AppConfig: + app_config = AppConfig() + if "test" not in app_config.cache.mongo_database or "test" not in app_config.queue.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + if app_config.common.hf_endpoint != "https://huggingface.co": + raise ValueError("Test must be launched on the production hub") + return app_config + + +@fixture(autouse=True) +def real_clean_mongo_databases(real_app_config: AppConfig) -> None: + _clean_cache_database() + _clean_queue_database() + + [email protected] [email protected]_dataset +def test_webhook( + real_client: TestClient, +) -> None: + dataset = "glue" + payload = {"event": "add", "repo": {"type": "dataset", "name": dataset, "gitalyUid": "123"}} + response = real_client.post("/webhook", json=payload) + assert response.status_code == 200, response.text
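The substantive change in `libcommon/src/libcommon/dataset.py` above inverts the order of operations: instead of unconditionally calling `ask_access` before every `dataset_info` lookup, the new code tries the lookup first and only asks for access when it raises `RepositoryNotFoundError`. A condensed sketch of that flow — the `ask_access` stub and the re-raise are assumptions, since the diff hunk does not show what follows the `ask_access` call:

```python
from typing import Optional

from huggingface_hub.hf_api import DatasetInfo, HfApi, RepositoryNotFoundError


def ask_access(dataset: str, hf_endpoint: str, hf_token: Optional[str]) -> None:
    """Stub standing in for libcommon.dataset.ask_access."""
    raise NotImplementedError


def get_dataset_info_for_supported_datasets(
    dataset: str, hf_endpoint: str, hf_token: Optional[str] = None
) -> DatasetInfo:
    try:
        # happy path: the token already has access to the repository
        return HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token)
    except RepositoryNotFoundError:
        # "not found" may mean gated or private: only now ask for access,
        # instead of asking unconditionally before every lookup as before
        ask_access(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token)
        raise
```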
8e5f8761922ae22a272ac74adfa445a1336d7484
Sylvain Lesage
2022-11-28T21:47:19
Implement generic processing steps (#650)
diff --git a/.github/workflows/l-libcache.yml b/.github/workflows/l-libcache.yml deleted file mode 100644 index 592303de..00000000 --- a/.github/workflows/l-libcache.yml +++ /dev/null @@ -1,22 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -name: libs/libcache -on: - workflow_dispatch: - push: - paths: - - 'libs/libcache/**' - - '.github/workflows/l-libcache.yml' - - '.github/workflows/_quality-python.yml' - - '.github/workflows/_unit-tests-python.yml' - - 'tools/docker-compose-mongo.yml' -jobs: - quality: - uses: ./.github/workflows/_quality-python.yml - with: - working-directory: libs/libcache - unit-tests: - uses: ./.github/workflows/_unit-tests-python.yml - with: - working-directory: libs/libcache diff --git a/.github/workflows/l-libcommon.yml b/.github/workflows/l-libcommon.yml index f8f15511..d1b50732 100644 --- a/.github/workflows/l-libcommon.yml +++ b/.github/workflows/l-libcommon.yml @@ -13 +13 @@ on: - - 'tools/docker-compose-empty.yml' + - 'tools/docker-compose-mongo.yml' diff --git a/.github/workflows/l-libqueue.yml b/.github/workflows/l-libqueue.yml deleted file mode 100644 index 5477472e..00000000 --- a/.github/workflows/l-libqueue.yml +++ /dev/null @@ -1,22 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -name: libs/libqueue -on: - workflow_dispatch: - push: - paths: - - 'libs/libqueue/**' - - '.github/workflows/l-libqueue.yml' - - '.github/workflows/_quality-python.yml' - - '.github/workflows/_unit-tests-python.yml' - - 'tools/docker-compose-mongo.yml' -jobs: - quality: - uses: ./.github/workflows/_quality-python.yml - with: - working-directory: libs/libqueue - unit-tests: - uses: ./.github/workflows/_unit-tests-python.yml - with: - working-directory: libs/libqueue diff --git a/.vscode/monorepo.code-workspace b/.vscode/monorepo.code-workspace index b4ce202d..7ddb509d 100644 --- a/.vscode/monorepo.code-workspace +++ b/.vscode/monorepo.code-workspace @@ -15,4 +14,0 @@ - { - "name": "libs/libcache", - "path": "../libs/libcache" - }, @@ -23,4 +18,0 @@ - { - "name": "libs/libqueue", - "path": "../libs/libqueue" - }, @@ -42,0 +35,4 @@ + { + "name": "workers/parquet", + "path": "../workers/parquet" + }, diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 9c30e6d7..241b40bb 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -58 +58 @@ The application is distributed in several components. -The precomputed responses are stored in a Mongo database called "cache" (see [libcache](./libs/libcache)). They are computed by [workers](./workers) which take their jobs from a job queue stored in a Mongo database called "queue" (see [libqueue](./libs/libqueue)), and store the results (error or valid response) into the "cache". +The precomputed responses are stored in a Mongo database called "cache". They are computed by [workers](./workers) which take their jobs from a job queue stored in a Mongo database called "queue", and store the results (error or valid response) into the "cache" (see [libcommon](./libs/libcommon)). 
@@ -130 +130 @@ We version the [libraries](./libs) as they are dependencies of the [services](./ -And then update the library version in the services that require the update, for example if the library is `libcache`: +And then update the library version in the services that require the update, for example if the library is `libcommon`: @@ -133 +133 @@ And then update the library version in the services that require the update, for -poetry update libcache +poetry update libcommon diff --git a/Makefile b/Makefile index 116e609f..b4a64a3b 100644 --- a/Makefile +++ b/Makefile @@ -26,2 +25,0 @@ install: - $(MAKE) -C libs/libcache/ install - $(MAKE) -C libs/libqueue/ install @@ -54,2 +51,0 @@ test: - $(MAKE) -C libs/libcache/ test - $(MAKE) -C libs/libqueue/ test @@ -64,2 +59,0 @@ coverage: - $(MAKE) -C libs/libcache/ coverage - $(MAKE) -C libs/libqueue/ coverage @@ -78,2 +71,0 @@ quality: - $(MAKE) -C libs/libcache/ quality - $(MAKE) -C libs/libqueue/ quality @@ -90,2 +81,0 @@ style: - $(MAKE) -C libs/libcache/ style - $(MAKE) -C libs/libqueue/ style diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index ae5c52de..d6467df7 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-2d81b2f" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-c815296" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-2d81b2f", - "api": "huggingface/datasets-server-services-api:sha-2d81b2f" + "admin": "huggingface/datasets-server-services-admin:sha-7eb22ac", + "api": "huggingface/datasets-server-services-api:sha-6907835" @@ -12,2 +12,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-2d81b2f", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-2d81b2f" + "splits": "huggingface/datasets-server-workers-splits:sha-5b9a872", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-5b9a872" diff --git a/chart/static-files/openapi.json b/chart/static-files/openapi.json index 0aff92b7..5625ce8e 100644 --- a/chart/static-files/openapi.json +++ b/chart/static-files/openapi.json @@ -1839 +1839 @@ - "error": "Parameters 'dataset', 'config' and 'split' are required" + "error": "Parameter 'dataset' is required" @@ -1845 +1845 @@ - "error": "Parameters 'dataset', 'config' and 'split' are required" + "error": "Parameters 'config' and 'split' are required" @@ -1851 +1851 @@ - "error": "Parameters 'dataset', 'config' and 'split' are required" + "error": "Parameters 'config' and 'split' are required" @@ -1857 +1857 @@ - "error": "Parameters 'dataset', 'config' and 'split' are required" + "error": "Parameter 'dataset' is required" @@ -1863 +1863 @@ - "error": "Parameters 'dataset', 'config' and 'split' are required" + "error": "Parameters 'config' and 'split' are required" @@ -1869 +1869 @@ - "error": "Parameters 'dataset', 'config' and 'split' are required" + "error": "Parameters 'config' and 'split' are required" diff --git a/e2e/pyproject.toml b/e2e/pyproject.toml index b98bd916..03afc152 100644 --- a/e2e/pyproject.toml +++ b/e2e/pyproject.toml @@ -28,0 +29 @@ requires = ["poetry-core>=1.0.0"] +# addopts = "-k 'wip'" @@ -29,0 +31,3 @@ filterwarnings = ["ignore::DeprecationWarning"] +markers = [ + "wip: tests being developed" +] diff --git a/e2e/tests/fixtures/hub.py b/e2e/tests/fixtures/hub.py index b9caf969..fd513b1b 100644 --- a/e2e/tests/fixtures/hub.py +++ b/e2e/tests/fixtures/hub.py @@ -8 +8 @@ from contextlib import 
contextmanager, suppress -from typing import Dict, Iterable, Literal, Optional, TypedDict +from typing import Iterable, Literal, Mapping, Optional, TypedDict @@ -40 +40 @@ def update_repo_settings( -) -> Dict[str, bool]: +) -> Mapping[str, bool]: @@ -210 +210 @@ AuthType = Literal["cookie", "token", "none"] -AuthHeaders = Dict[AuthType, Dict[str, str]] +AuthHeaders = Mapping[AuthType, Mapping[str, str]] diff --git a/e2e/tests/test_10_healthcheck.py b/e2e/tests/test_10_healthcheck.py index a7425e0e..42d59cb4 100644 --- a/e2e/tests/test_10_healthcheck.py +++ b/e2e/tests/test_10_healthcheck.py @@ -3,0 +4,2 @@ +import pytest + @@ -7 +9,2 @@ from .utils import poll -def test_healthcheck(): [email protected]("endpoint", ["/", "/healthcheck", "/metrics"]) +def test_healthcheck(endpoint: str) -> None: @@ -9,5 +12 @@ def test_healthcheck(): - response = poll("/healthcheck", expected_code=404) - assert response.status_code == 404, f"{response.status_code} - {response.text}" - assert "Not Found" in response.text, response.text - - response = poll("/metrics", expected_code=404) + response = poll(endpoint, expected_code=404) diff --git a/e2e/tests/test_11_auth.py b/e2e/tests/test_11_auth.py index b7a697b4..8f250b15 100644 --- a/e2e/tests/test_11_auth.py +++ b/e2e/tests/test_11_auth.py @@ -9 +8,0 @@ from .utils import ( - get, @@ -12 +11,2 @@ from .utils import ( - refresh_poll_splits, + poll_splits, + post_refresh, @@ -22 +22 @@ def log(response: Response, dataset: str) -> str: - "type,auth,status_code,error_code_splits,error_code_first_rows", + "type,auth,webhook_status_code,response_status_code,error_code_splits,error_code_first_rows", @@ -24,9 +24,13 @@ def log(response: Response, dataset: str) -> str: - ("public", "none", 200, None, None), - ("public", "token", 200, None, None), - ("public", "cookie", 200, None, None), - ("gated", "none", 401, "ExternalUnauthenticatedError", "ExternalUnauthenticatedError"), - ("gated", "token", 200, None, None), - ("gated", "cookie", 200, None, None), - ("private", "none", 401, "ExternalUnauthenticatedError", "ExternalUnauthenticatedError"), - ("private", "token", 404, "SplitsResponseNotFound", "FirstRowsResponseNotFound"), - ("private", "cookie", 404, "SplitsResponseNotFound", "FirstRowsResponseNotFound"), + ("public", "none", 200, 200, None, None), + ("public", "token", 200, 200, None, None), + ("public", "cookie", 200, 200, None, None), + # gated: webhook_status_code is 200 because the access is asked for the app token, not the user token + # (which is not passed to the webhook request) + ("gated", "none", 200, 401, "ExternalUnauthenticatedError", "ExternalUnauthenticatedError"), + ("gated", "token", 200, 200, None, None), + ("gated", "cookie", 200, 200, None, None), + # private: webhook_status_code is 400 because the access is asked for the app token, which has no + # access to the private datasets. 
As a consequence, no data in the cache + ("private", "none", 400, 401, "ExternalUnauthenticatedError", "ExternalUnauthenticatedError"), + ("private", "token", 400, 404, "ResponseNotFound", "ResponseNotFound"), + ("private", "cookie", 400, 404, "ResponseNotFound", "ResponseNotFound"), @@ -40 +44,2 @@ def test_split_public_auth( - status_code: int, + webhook_status_code: int, + response_status_code: int, @@ -44,3 +48,0 @@ def test_split_public_auth( - if auth not in auth_headers: - # ignore the test case if the auth type is not configured - pytest.skip(f"auth {auth} has not been configured") @@ -48,7 +50,4 @@ def test_split_public_auth( - # private: no need to refresh, it's not implemented. - r_splits = ( - get(f"/splits?dataset={dataset}", headers=auth_headers[auth]) - if type == "private" - else refresh_poll_splits(dataset, headers=auth_headers[auth]) - ) - assert r_splits.status_code == status_code, log(r_splits, dataset) + r_webhook = post_refresh(dataset) + assert r_webhook.status_code == webhook_status_code, log(r_webhook, dataset) + r_splits = poll_splits(dataset, headers=auth_headers[auth]) + assert r_splits.status_code == response_status_code, log(r_splits, dataset) @@ -56,7 +55,2 @@ def test_split_public_auth( - - r_rows = ( - get(f"/first-rows?dataset={dataset}&config={config}&split={split}", headers=auth_headers[auth]) - if type == "private" - else poll_first_rows(dataset, config, split, headers=auth_headers[auth]) - ) - assert r_rows.status_code == status_code, log(r_rows, dataset) + r_rows = poll_first_rows(dataset, config, split, headers=auth_headers[auth]) + assert r_rows.status_code == response_status_code, log(r_rows, dataset) diff --git a/e2e/tests/test_12_splits.py b/e2e/tests/test_12_splits.py index fe8cea99..23481bad 100644 --- a/e2e/tests/test_12_splits.py +++ b/e2e/tests/test_12_splits.py @@ -6,7 +6 @@ import pytest -from .utils import ( - get, - get_openapi_body_example, - poll, - post_refresh, - refresh_poll_splits, -) +from .utils import get, get_openapi_body_example, poll, poll_splits, post_refresh @@ -46 +40 @@ from .utils import ( -def test_splits(status: int, name: str, dataset: str, error_code: str): +def test_splits_using_openapi(status: int, name: str, dataset: str, error_code: str): @@ -53 +47 @@ def test_splits(status: int, name: str, dataset: str, error_code: str): - elif name == "not-ready": + else: @@ -56,3 +50 @@ def test_splits(status: int, name: str, dataset: str, error_code: str): - r_splits = get(f"/splits?dataset={dataset}") - else: - r_splits = refresh_poll_splits(dataset) + r_splits = get(f"/splits?dataset={dataset}") if name == "not-ready" else poll_splits(dataset) diff --git a/e2e/tests/test_13_first_rows.py b/e2e/tests/test_13_first_rows.py index 342507f5..3074f4a6 100644 --- a/e2e/tests/test_13_first_rows.py +++ b/e2e/tests/test_13_first_rows.py @@ -15,2 +15,3 @@ from .utils import ( - refresh_poll_splits, - refresh_poll_splits_first_rows, + poll_first_rows, + poll_splits, + post_refresh, @@ -56,8 +56,0 @@ def test_first_rows(status: int, name: str, dataset: str, config: str, split: st - elif name.startswith("inexistent-") or name.startswith("private-") or name.startswith("gated-"): - refresh_poll_splits(dataset) - # no need to retry - r_rows = get(f"/first-rows?dataset={dataset}&config={config}&split={split}") - elif name == "not-ready": - refresh_poll_splits(dataset) - # poll the endpoint before the worker had the chance to process it - r_rows = get(f"/first-rows?dataset={dataset}&config={config}&split={split}") @@ -65 +58,7 @@ def 
test_first_rows(status: int, name: str, dataset: str, config: str, split: st - _, r_rows = refresh_poll_splits_first_rows(dataset, config, split) + post_refresh(dataset) + poll_splits(dataset) + if name == "not-ready": + # poll the endpoint before the worker had the chance to process it + r_rows = get(f"/first-rows?dataset={dataset}&config={config}&split={split}") + else: + r_rows = poll_first_rows(dataset, config, split) diff --git a/e2e/tests/test_21_api_metrics.py b/e2e/tests/test_21_api_metrics.py index ae81c77b..af87a786 100644 --- a/e2e/tests/test_21_api_metrics.py +++ b/e2e/tests/test_21_api_metrics.py @@ -6 +6 @@ import re -from typing import Dict +from typing import Mapping @@ -11 +11 @@ from .utils import API_URL, get -def has_metric(name: str, labels: Dict[str, str], metrics: set[str]) -> bool: +def has_metric(name: str, labels: Mapping[str, str], metrics: set[str]) -> bool: diff --git a/e2e/tests/test_31_admin_metrics.py b/e2e/tests/test_31_admin_metrics.py index 8d4e2e6e..206c1aa3 100644 --- a/e2e/tests/test_31_admin_metrics.py +++ b/e2e/tests/test_31_admin_metrics.py @@ -6 +6 @@ import re -from typing import Dict +from typing import Mapping @@ -11 +11 @@ from .utils import ADMIN_URL, get -def has_metric(name: str, labels: Dict[str, str], metrics: set[str]) -> bool: +def has_metric(name: str, labels: Mapping[str, str], metrics: set[str]) -> bool: diff --git a/e2e/tests/utils.py b/e2e/tests/utils.py index 52b63ffd..880b64d8 100644 --- a/e2e/tests/utils.py +++ b/e2e/tests/utils.py @@ -8 +8 @@ from pathlib import Path -from typing import Any, Dict, Optional, Tuple +from typing import Any, Mapping, Optional, Tuple @@ -22 +22 @@ API_URL = f"http://localhost:{API_UVICORN_PORT}" -Headers = Dict[str, str] +Headers = Mapping[str, str] @@ -69,8 +69,2 @@ def poll( -def post_refresh(dataset: str, headers: Headers = None) -> Response: - if headers is None: - headers = {} - return post( - "/webhook", - json={"event": "update", "repo": {"type": "dataset", "name": dataset}}, - headers=headers, - ) +def post_refresh(dataset: str) -> Response: + return post("/webhook", json={"event": "update", "repo": {"type": "dataset", "name": dataset}}) @@ -87,20 +80,0 @@ def poll_first_rows(dataset: str, config: str, split: str, headers: Headers = No -def refresh_poll_splits(dataset: str, headers: Headers = None) -> Response: - # ask for the dataset to be refreshed - response = post_refresh(dataset, headers=headers) - assert response.status_code == 200, f"{response.status_code} - {response.text}" - - # poll the /splits endpoint until we get something else than "The dataset is being processed. Retry later." 
- return poll_splits(dataset, headers=headers) - - -def refresh_poll_splits_first_rows( - dataset: str, config: str, split: str, headers: Headers = None -) -> Tuple[Response, Response]: - response_splits = refresh_poll_splits(dataset, headers=headers) - assert response_splits.status_code == 200, f"{response_splits.status_code} - {response_splits.text}" - - response_rows = poll_first_rows(dataset, config, split, headers=headers) - - return response_splits, response_rows - - diff --git a/jobs/mongodb_migration/Dockerfile b/jobs/mongodb_migration/Dockerfile index aeb5c7c8..85f9d9fc 100644 --- a/jobs/mongodb_migration/Dockerfile +++ b/jobs/mongodb_migration/Dockerfile @@ -24,2 +23,0 @@ WORKDIR /src -COPY libs/libcache/dist ./libs/libcache/dist -COPY libs/libqueue/dist ./libs/libqueue/dist diff --git a/jobs/mongodb_migration/README.md b/jobs/mongodb_migration/README.md index e9e1db79..c1dcb004 100644 --- a/jobs/mongodb_migration/README.md +++ b/jobs/mongodb_migration/README.md @@ -16,8 +15,0 @@ Set environment variables to configure the job (`MONGODB_MIGRATION_` prefix): -### Cache - -See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. - -### Queue - -See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. - diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index c2ba669a..e614a601 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -85 +85 @@ description = "Python package for providing Mozilla's CA Bundle." -category = "dev" +category = "main" @@ -93 +93 @@ description = "The Real First Universal Charset Detector. Open, modern and activ -category = "dev" +category = "main" @@ -115 +115 @@ description = "Cross-platform colored terminal text." -category = "dev" +category = "main" @@ -195,0 +196,12 @@ test = ["pytest (>=6)"] +[[package]] +name = "filelock" +version = "3.8.0" +description = "A platform independent file lock." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] + @@ -211 +223 @@ name = "gitdb" -version = "4.0.9" +version = "4.0.10" @@ -215 +227 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -248,0 +261,27 @@ lxml = ["lxml"] +[[package]] +name = "huggingface-hub" +version = "0.11.0" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +category = "main" +optional = false +python-versions = ">=3.7.0" + +[package.dependencies] +filelock = "*" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = "*" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +torch = ["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] + @@ -253 +292 @@ description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" +category = "main" @@ -279,19 +317,0 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] -[[package]] -name = "libcache" -version = "0.4.3" -description = "Library for the cache in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} - -[package.source] -type = "file" -url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" - @@ -300 +320 @@ name = "libcommon" -version = "0.3.3" +version = "0.5.0" @@ -306,0 +327 @@ python-versions = "==3.9.6" +appdirs = ">=1.4.4,<2.0.0" @@ -308,16 +329 @@ environs = ">=9.5.0,<10.0.0" -orjson = ">=3.6.4,<4.0.0" - -[package.source] -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl" - -[[package]] -name = "libqueue" -version = "0.4.13" -description = "Library for the jobs queue in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -environs = ">=9.5.0,<10.0.0" +huggingface-hub = ">=0.11.0,<0.12.0" @@ -326 +332 @@ mongoengine = ">=0.24.1,<0.25.0" -packaging = ">=21.3,<22.0" +orjson = ">=3.6.4,<4.0.0" @@ -332 +338 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl" @@ -679 +685 @@ description = "YAML 
parser and emitter for Python" -category = "dev" +category = "main" @@ -687 +693 @@ description = "Python HTTP for Humans." -category = "dev" +category = "main" @@ -732 +738 @@ name = "setuptools" -version = "65.6.2" +version = "65.6.3" @@ -801,0 +808,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "tqdm" +version = "4.64.1" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + @@ -814 +837 @@ description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" +category = "main" @@ -822 +845 @@ description = "HTTP library with thread-safe connection pooling, file post, and -category = "dev" +category = "main" @@ -842 +865 @@ python-versions = "3.9.6" -content-hash = "d4fecce5ce9d0f7f3639f95e30c76cd8e16be91f46121d8939916da0d6748219" +content-hash = "e5eb92e8b71d1e68a29761ed4d1e58c3c03c5fe600f835584a19369a64c6d5c0" @@ -971,0 +995,4 @@ exceptiongroup = [ +filelock = [ + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, +] @@ -977,2 +1004,2 @@ gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, @@ -987,0 +1015,4 @@ html5lib = [ +huggingface-hub = [ + {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, + {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, +] @@ -1000,3 +1030,0 @@ isort = [ -libcache = [ - {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, -] @@ -1004,4 +1032 @@ libcommon = [ - {file = "libcommon-0.3.3-py3-none-any.whl", hash = "sha256:b56b6e48bb2c92c3dfc619fc25718db930bc356f1e54fb9f8a67ba4597cb499d"}, -] -libqueue = [ - {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, + {file = "libcommon-0.5.0-py3-none-any.whl", hash = "sha256:0267504716992f562382ff5029ace87444fd12793f2393f3800921d384a0fd52"}, @@ -1416,2 +1441,2 @@ setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, @@ -1446,0 +1472,4 @@ 
tomlkit = [ +tqdm = [ + {file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = "sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"}, + {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, +] diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index c220cbac..9680d852 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10,3 +10 @@ environs = "^9.5.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl", develop = false } diff --git a/jobs/mongodb_migration/src/mongodb_migration/config.py b/jobs/mongodb_migration/src/mongodb_migration/config.py index 39c6051d..e00b74ee 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/config.py +++ b/jobs/mongodb_migration/src/mongodb_migration/config.py @@ -5,3 +5 @@ from environs import Env -from libcache.config import CacheConfig -from libcommon.config import CommonConfig -from libqueue.config import QueueConfig +from libcommon.config import CacheConfig, CommonConfig, QueueConfig diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py index 8fb8fe0e..74d59e4f 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py +++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py @@ -44 +44 @@ class MigrationAddForceToJob(Migration): -# copied from libqueue.queue.Job, as a snapshot of when the migration was created +# copied from libcommon.queue.Job, as a snapshot of when the migration was created diff --git a/libs/libcache/.flake8 b/libs/libcache/.flake8 deleted file mode 100644 index f7d6157c..00000000 --- a/libs/libcache/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -# Recommend matching the black line length (119), -# rather than using the flake8 default of 79: -max-line-length = 119 -extend-ignore = "E203" diff --git a/libs/libcache/.python-version b/libs/libcache/.python-version deleted file mode 100644 index 1635d0f5..00000000 --- a/libs/libcache/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.9.6 diff --git a/libs/libcache/Makefile b/libs/libcache/Makefile deleted file mode 100644 index ede55b5d..00000000 --- a/libs/libcache/Makefile +++ /dev/null @@ -1,11 +0,0 @@ -# environment variables for the commands (docker-compose, poetry) -export MONGO_PORT := 27020 -export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} -export COMPOSE_PROJECT_NAME := libcache -# makefile variables -DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml - -include ../../tools/Python.mk -include ../../tools/PythonAudit.mk -include ../../tools/PythonTest.mk -include ../../tools/Docker.mk diff --git a/libs/libcache/README.md b/libs/libcache/README.md deleted file mode 100644 index 711d278d..00000000 --- a/libs/libcache/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# libcache - -A Python library to manage the storage of precomputed API responses in a mongo database (the "cache"). 
- -## Configuration - -Set environment variables to configure the following aspects: - -- `CACHE_ASSETS_DIRECTORY`: directory where the asset files are stored. Defaults to empty, in which case the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. -- `CACHE_MONGO_DATABASE`: the name of the database used for storing the cache. Defaults to `"datasets_server_cache"`. -- `CACHE_MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. diff --git a/libs/libcache/dist/libcache-0.1.0-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.0-py3-none-any.whl deleted file mode 100644 index 70e6d86a..00000000 Binary files a/libs/libcache/dist/libcache-0.1.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.0.tar.gz b/libs/libcache/dist/libcache-0.1.0.tar.gz deleted file mode 100644 index 1037ba43..00000000 Binary files a/libs/libcache/dist/libcache-0.1.0.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.1-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.1-py3-none-any.whl deleted file mode 100644 index 87dced16..00000000 Binary files a/libs/libcache/dist/libcache-0.1.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.1.tar.gz b/libs/libcache/dist/libcache-0.1.1.tar.gz deleted file mode 100644 index 44929909..00000000 Binary files a/libs/libcache/dist/libcache-0.1.1.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.10-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.10-py3-none-any.whl deleted file mode 100644 index fbc94769..00000000 Binary files a/libs/libcache/dist/libcache-0.1.10-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.10.tar.gz b/libs/libcache/dist/libcache-0.1.10.tar.gz deleted file mode 100644 index 70624903..00000000 Binary files a/libs/libcache/dist/libcache-0.1.10.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.11-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.11-py3-none-any.whl deleted file mode 100644 index 8bbd564d..00000000 Binary files a/libs/libcache/dist/libcache-0.1.11-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.11.tar.gz b/libs/libcache/dist/libcache-0.1.11.tar.gz deleted file mode 100644 index 58df8bf1..00000000 Binary files a/libs/libcache/dist/libcache-0.1.11.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.12-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.12-py3-none-any.whl deleted file mode 100644 index 9a05486f..00000000 Binary files a/libs/libcache/dist/libcache-0.1.12-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.12.tar.gz b/libs/libcache/dist/libcache-0.1.12.tar.gz deleted file mode 100644 index ec870f81..00000000 Binary files a/libs/libcache/dist/libcache-0.1.12.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.13-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.13-py3-none-any.whl deleted file mode 100644 index 838d9345..00000000 Binary files a/libs/libcache/dist/libcache-0.1.13-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.13.tar.gz b/libs/libcache/dist/libcache-0.1.13.tar.gz deleted file mode 100644 index 8c25b4f7..00000000 Binary files a/libs/libcache/dist/libcache-0.1.13.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.14-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.14-py3-none-any.whl deleted 
file mode 100644 index 4ec0d94f..00000000 Binary files a/libs/libcache/dist/libcache-0.1.14-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.14.tar.gz b/libs/libcache/dist/libcache-0.1.14.tar.gz deleted file mode 100644 index 757c0d20..00000000 Binary files a/libs/libcache/dist/libcache-0.1.14.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.15-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.15-py3-none-any.whl deleted file mode 100644 index 582aa370..00000000 Binary files a/libs/libcache/dist/libcache-0.1.15-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.15.tar.gz b/libs/libcache/dist/libcache-0.1.15.tar.gz deleted file mode 100644 index 5f4a5fdd..00000000 Binary files a/libs/libcache/dist/libcache-0.1.15.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.16-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.16-py3-none-any.whl deleted file mode 100644 index b66cf200..00000000 Binary files a/libs/libcache/dist/libcache-0.1.16-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.16.tar.gz b/libs/libcache/dist/libcache-0.1.16.tar.gz deleted file mode 100644 index 40cb457a..00000000 Binary files a/libs/libcache/dist/libcache-0.1.16.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.17-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.17-py3-none-any.whl deleted file mode 100644 index 9a580720..00000000 Binary files a/libs/libcache/dist/libcache-0.1.17-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.17.tar.gz b/libs/libcache/dist/libcache-0.1.17.tar.gz deleted file mode 100644 index b37f3fc9..00000000 Binary files a/libs/libcache/dist/libcache-0.1.17.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.18-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.18-py3-none-any.whl deleted file mode 100644 index 4de5ff96..00000000 Binary files a/libs/libcache/dist/libcache-0.1.18-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.18.tar.gz b/libs/libcache/dist/libcache-0.1.18.tar.gz deleted file mode 100644 index 091b5ddf..00000000 Binary files a/libs/libcache/dist/libcache-0.1.18.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.19-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.19-py3-none-any.whl deleted file mode 100644 index bc90744c..00000000 Binary files a/libs/libcache/dist/libcache-0.1.19-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.19.tar.gz b/libs/libcache/dist/libcache-0.1.19.tar.gz deleted file mode 100644 index fcae4a79..00000000 Binary files a/libs/libcache/dist/libcache-0.1.19.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.2-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.2-py3-none-any.whl deleted file mode 100644 index 18eb75aa..00000000 Binary files a/libs/libcache/dist/libcache-0.1.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.2.tar.gz b/libs/libcache/dist/libcache-0.1.2.tar.gz deleted file mode 100644 index bef0d1e1..00000000 Binary files a/libs/libcache/dist/libcache-0.1.2.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.20-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.20-py3-none-any.whl deleted file mode 100644 index 774cbd76..00000000 Binary files a/libs/libcache/dist/libcache-0.1.20-py3-none-any.whl and /dev/null differ diff --git 
a/libs/libcache/dist/libcache-0.1.20.tar.gz b/libs/libcache/dist/libcache-0.1.20.tar.gz deleted file mode 100644 index 5b64f421..00000000 Binary files a/libs/libcache/dist/libcache-0.1.20.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.21-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.21-py3-none-any.whl deleted file mode 100644 index 29065c3a..00000000 Binary files a/libs/libcache/dist/libcache-0.1.21-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.21.tar.gz b/libs/libcache/dist/libcache-0.1.21.tar.gz deleted file mode 100644 index 2a2358d5..00000000 Binary files a/libs/libcache/dist/libcache-0.1.21.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.22-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.22-py3-none-any.whl deleted file mode 100644 index f2145f7e..00000000 Binary files a/libs/libcache/dist/libcache-0.1.22-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.22.tar.gz b/libs/libcache/dist/libcache-0.1.22.tar.gz deleted file mode 100644 index c3f7e43f..00000000 Binary files a/libs/libcache/dist/libcache-0.1.22.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.23-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.23-py3-none-any.whl deleted file mode 100644 index f2dfd8b5..00000000 Binary files a/libs/libcache/dist/libcache-0.1.23-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.23.tar.gz b/libs/libcache/dist/libcache-0.1.23.tar.gz deleted file mode 100644 index 325a89cf..00000000 Binary files a/libs/libcache/dist/libcache-0.1.23.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.24-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.24-py3-none-any.whl deleted file mode 100644 index f5ada4a5..00000000 Binary files a/libs/libcache/dist/libcache-0.1.24-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.24.tar.gz b/libs/libcache/dist/libcache-0.1.24.tar.gz deleted file mode 100644 index 65bb4b76..00000000 Binary files a/libs/libcache/dist/libcache-0.1.24.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.25-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.25-py3-none-any.whl deleted file mode 100644 index 8b99819c..00000000 Binary files a/libs/libcache/dist/libcache-0.1.25-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.25.tar.gz b/libs/libcache/dist/libcache-0.1.25.tar.gz deleted file mode 100644 index 0976c822..00000000 Binary files a/libs/libcache/dist/libcache-0.1.25.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.26-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.26-py3-none-any.whl deleted file mode 100644 index eec362b7..00000000 Binary files a/libs/libcache/dist/libcache-0.1.26-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.26.tar.gz b/libs/libcache/dist/libcache-0.1.26.tar.gz deleted file mode 100644 index 9de122cb..00000000 Binary files a/libs/libcache/dist/libcache-0.1.26.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.27-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.27-py3-none-any.whl deleted file mode 100644 index b5cac75f..00000000 Binary files a/libs/libcache/dist/libcache-0.1.27-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.27.tar.gz b/libs/libcache/dist/libcache-0.1.27.tar.gz deleted file mode 100644 index 62e31860..00000000 Binary files 
a/libs/libcache/dist/libcache-0.1.27.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.28-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.28-py3-none-any.whl deleted file mode 100644 index e5f8649b..00000000 Binary files a/libs/libcache/dist/libcache-0.1.28-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.28.tar.gz b/libs/libcache/dist/libcache-0.1.28.tar.gz deleted file mode 100644 index 724acbc9..00000000 Binary files a/libs/libcache/dist/libcache-0.1.28.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.3-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.3-py3-none-any.whl deleted file mode 100644 index 80af014f..00000000 Binary files a/libs/libcache/dist/libcache-0.1.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.3.tar.gz b/libs/libcache/dist/libcache-0.1.3.tar.gz deleted file mode 100644 index 30a3a3bf..00000000 Binary files a/libs/libcache/dist/libcache-0.1.3.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.4-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.4-py3-none-any.whl deleted file mode 100644 index e152057f..00000000 Binary files a/libs/libcache/dist/libcache-0.1.4-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.4.tar.gz b/libs/libcache/dist/libcache-0.1.4.tar.gz deleted file mode 100644 index 4148b818..00000000 Binary files a/libs/libcache/dist/libcache-0.1.4.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.5-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.5-py3-none-any.whl deleted file mode 100644 index 03661d8e..00000000 Binary files a/libs/libcache/dist/libcache-0.1.5-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.5.tar.gz b/libs/libcache/dist/libcache-0.1.5.tar.gz deleted file mode 100644 index b1008bf5..00000000 Binary files a/libs/libcache/dist/libcache-0.1.5.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.6-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.6-py3-none-any.whl deleted file mode 100644 index 50513c4a..00000000 Binary files a/libs/libcache/dist/libcache-0.1.6-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.6.tar.gz b/libs/libcache/dist/libcache-0.1.6.tar.gz deleted file mode 100644 index f0691d52..00000000 Binary files a/libs/libcache/dist/libcache-0.1.6.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.7-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.7-py3-none-any.whl deleted file mode 100644 index bc5f3e32..00000000 Binary files a/libs/libcache/dist/libcache-0.1.7-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.7.tar.gz b/libs/libcache/dist/libcache-0.1.7.tar.gz deleted file mode 100644 index 5c67ceec..00000000 Binary files a/libs/libcache/dist/libcache-0.1.7.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.8-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.8-py3-none-any.whl deleted file mode 100644 index 6ae05135..00000000 Binary files a/libs/libcache/dist/libcache-0.1.8-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.8.tar.gz b/libs/libcache/dist/libcache-0.1.8.tar.gz deleted file mode 100644 index 57229dd2..00000000 Binary files a/libs/libcache/dist/libcache-0.1.8.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.9-py3-none-any.whl b/libs/libcache/dist/libcache-0.1.9-py3-none-any.whl deleted file mode 
100644 index ad88a698..00000000 Binary files a/libs/libcache/dist/libcache-0.1.9-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.1.9.tar.gz b/libs/libcache/dist/libcache-0.1.9.tar.gz deleted file mode 100644 index 03e3adc7..00000000 Binary files a/libs/libcache/dist/libcache-0.1.9.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.2.0-py3-none-any.whl b/libs/libcache/dist/libcache-0.2.0-py3-none-any.whl deleted file mode 100644 index 99c1af6b..00000000 Binary files a/libs/libcache/dist/libcache-0.2.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.2.0.tar.gz b/libs/libcache/dist/libcache-0.2.0.tar.gz deleted file mode 100644 index a40a4dd2..00000000 Binary files a/libs/libcache/dist/libcache-0.2.0.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.2.1-py3-none-any.whl b/libs/libcache/dist/libcache-0.2.1-py3-none-any.whl deleted file mode 100644 index fb6ff49f..00000000 Binary files a/libs/libcache/dist/libcache-0.2.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.2.1.tar.gz b/libs/libcache/dist/libcache-0.2.1.tar.gz deleted file mode 100644 index e6d2b0f9..00000000 Binary files a/libs/libcache/dist/libcache-0.2.1.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.2.2-py3-none-any.whl b/libs/libcache/dist/libcache-0.2.2-py3-none-any.whl deleted file mode 100644 index 1c3115f8..00000000 Binary files a/libs/libcache/dist/libcache-0.2.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.2.2.tar.gz b/libs/libcache/dist/libcache-0.2.2.tar.gz deleted file mode 100644 index c09f9c17..00000000 Binary files a/libs/libcache/dist/libcache-0.2.2.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.0-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.0-py3-none-any.whl deleted file mode 100644 index a89d3efa..00000000 Binary files a/libs/libcache/dist/libcache-0.3.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.0.tar.gz b/libs/libcache/dist/libcache-0.3.0.tar.gz deleted file mode 100644 index bb2264fd..00000000 Binary files a/libs/libcache/dist/libcache-0.3.0.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.1-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.1-py3-none-any.whl deleted file mode 100644 index c9759c18..00000000 Binary files a/libs/libcache/dist/libcache-0.3.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.1.tar.gz b/libs/libcache/dist/libcache-0.3.1.tar.gz deleted file mode 100644 index 9346caff..00000000 Binary files a/libs/libcache/dist/libcache-0.3.1.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.2-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.2-py3-none-any.whl deleted file mode 100644 index 077036b5..00000000 Binary files a/libs/libcache/dist/libcache-0.3.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.2.tar.gz b/libs/libcache/dist/libcache-0.3.2.tar.gz deleted file mode 100644 index 196ff086..00000000 Binary files a/libs/libcache/dist/libcache-0.3.2.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.3-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.3-py3-none-any.whl deleted file mode 100644 index 91702f7b..00000000 Binary files a/libs/libcache/dist/libcache-0.3.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.3.tar.gz 
b/libs/libcache/dist/libcache-0.3.3.tar.gz deleted file mode 100644 index 6896a00d..00000000 Binary files a/libs/libcache/dist/libcache-0.3.3.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.4-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.4-py3-none-any.whl deleted file mode 100644 index 871da54d..00000000 Binary files a/libs/libcache/dist/libcache-0.3.4-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.3.4.tar.gz b/libs/libcache/dist/libcache-0.3.4.tar.gz deleted file mode 100644 index f6ba3fae..00000000 Binary files a/libs/libcache/dist/libcache-0.3.4.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.0-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.0-py3-none-any.whl deleted file mode 100644 index 06a00313..00000000 Binary files a/libs/libcache/dist/libcache-0.4.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.0.tar.gz b/libs/libcache/dist/libcache-0.4.0.tar.gz deleted file mode 100644 index 62741a68..00000000 Binary files a/libs/libcache/dist/libcache-0.4.0.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.0a0-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.0a0-py3-none-any.whl deleted file mode 100644 index 8d6bbd7f..00000000 Binary files a/libs/libcache/dist/libcache-0.4.0a0-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.0a0.tar.gz b/libs/libcache/dist/libcache-0.4.0a0.tar.gz deleted file mode 100644 index 599e1e82..00000000 Binary files a/libs/libcache/dist/libcache-0.4.0a0.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.1-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.1-py3-none-any.whl deleted file mode 100644 index 826a3a61..00000000 Binary files a/libs/libcache/dist/libcache-0.4.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.1.tar.gz b/libs/libcache/dist/libcache-0.4.1.tar.gz deleted file mode 100644 index 4e005fb6..00000000 Binary files a/libs/libcache/dist/libcache-0.4.1.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.2-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.2-py3-none-any.whl deleted file mode 100644 index 76507edd..00000000 Binary files a/libs/libcache/dist/libcache-0.4.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.2.tar.gz b/libs/libcache/dist/libcache-0.4.2.tar.gz deleted file mode 100644 index e9def6cc..00000000 Binary files a/libs/libcache/dist/libcache-0.4.2.tar.gz and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.3-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.3-py3-none-any.whl deleted file mode 100644 index fbf5ec20..00000000 Binary files a/libs/libcache/dist/libcache-0.4.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libcache/dist/libcache-0.4.3.tar.gz b/libs/libcache/dist/libcache-0.4.3.tar.gz deleted file mode 100644 index d8c4c5da..00000000 Binary files a/libs/libcache/dist/libcache-0.4.3.tar.gz and /dev/null differ diff --git a/libs/libcache/poetry.lock b/libs/libcache/poetry.lock deleted file mode 100644 index ac577c4a..00000000 --- a/libs/libcache/poetry.lock +++ /dev/null @@ -1,1340 +0,0 @@ -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "attrs" -version = "22.1.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "bandit" -version = "1.7.4" -description = "Security oriented static analyser for python code." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] -yaml = ["PyYAML"] - -[[package]] -name = "black" -version = "22.10.0" -description = "The uncompromising code formatter." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "cachecontrol" -version = "0.12.11" -description = "httplib2 caching for requests" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -lockfile = {version = ">=0.9", optional = true, markers = "extra == \"filecache\""} -msgpack = ">=0.5.2" -requests = "*" - -[package.extras] -filecache = ["lockfile (>=0.9)"] -redis = ["redis (>=2.10.5)"] - -[[package]] -name = "certifi" -version = "2022.9.24" -description = "Python package for providing Mozilla's CA Bundle." -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" -optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] - -[[package]] -name = "click" -version = "8.1.3" -description = "Composable command line interface toolkit" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" - -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - -[[package]] -name = "coverage" -version = "6.5.0" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cyclonedx-python-lib" -version = "3.1.0" -description = "A library for producing CycloneDX SBOM (Software Bill of Materials) files." -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - -[package.dependencies] -packageurl-python = ">=0.9" -setuptools = ">=47.0.0" -sortedcontainers = ">=2.4.0,<3.0.0" -toml = ">=0.10.0,<0.11.0" - -[[package]] -name = "dnspython" -version = "1.16.0" -description = "DNS toolkit" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -dnssec = ["ecdsa (>=0.13)", "pycryptodome"] -idna = ["idna (>=2.1)"] - -[[package]] -name = "environs" -version = "9.5.0" -description = "simplified environment variable parsing" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -marshmallow = ">=3.0.0" -python-dotenv = "*" - -[package.extras] -dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -django = ["dj-database-url", "dj-email-url", "django-cache-url"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] -tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] - -[[package]] -name = "exceptiongroup" -version = "1.0.4" -description = "Backport of PEP 654 (exception groups)" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[[package]] -name = "html5lib" -version = "1.1" -description = "HTML parser based on the WHATWG HTML specification" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -six = ">=1.9" -webencodings = "*" - -[package.extras] -all = ["chardet (>=2.2)", "genshi", "lxml"] -chardet = ["chardet (>=2.2)"] -genshi = ["genshi"] -lxml = ["lxml"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = 
"iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "isort" -version = "5.10.1" -description = "A Python utility / library to sort Python imports." -category = "dev" -optional = false -python-versions = ">=3.6.1,<4.0" - -[package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "lockfile" -version = "0.12.2" -description = "Platform-independent file locking module" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "marshmallow" -version = "3.19.0" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "mongo-types" -version = "0.15.1" -description = "Type stubs for mongoengine w/ basic support for bson and pymongo" -category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[[package]] -name = "mongoengine" -version = "0.24.2" -description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pymongo = ">=3.4,<5.0" - -[[package]] -name = "msgpack" -version = "1.0.4" -description = "MessagePack serializer" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "mypy" -version = "0.812" -description = "Optional static typing for Python" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -mypy-extensions = ">=0.4.3,<0.5.0" -typed-ast = ">=1.4.0,<1.5.0" -typing-extensions = ">=3.7.4" - -[package.extras] -dmypy = ["psutil (>=4.0)"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "packageurl-python" -version = "0.10.4" -description = "A purl aka. Package URL parser and builder" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -build = ["wheel"] -test = ["black", "isort", "pytest"] - -[[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - -[[package]] -name = "pathspec" -version = "0.10.2" -description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pbr" -version = "5.11.0" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" - -[[package]] -name = "pip" -version = "22.3.1" -description = "The PyPA recommended tool for installing Python packages." -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pip-api" -version = "0.0.30" -description = "An unofficial, importable pip API" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pip = "*" - -[[package]] -name = "pip-audit" -version = "2.4.6" -description = "A tool for scanning Python environments for known vulnerabilities" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -CacheControl = {version = ">=0.12.10", extras = ["filecache"]} -cyclonedx-python-lib = ">=2.0.0,<2.5.0 || >2.5.0" -html5lib = ">=1.1" -packaging = ">=21.0.0" -pip-api = ">=0.0.28" -pip-requirements-parser = ">=31.2.0" -resolvelib = ">=0.8.0" -rich = ">=12.4" -toml = ">=0.10" - -[package.extras] -dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"] -lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"] -test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"] - -[[package]] -name = "pip-requirements-parser" -version = "31.2.0" -description = "pip requirements parser - a mostly correct pip requirements parsing library because it uses pip's own code." -category = "dev" -optional = false -python-versions = ">=3.6.*" - -[package.dependencies] -packaging = "*" - -[package.extras] -docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"] -testing = ["pytest (>=6)", "pytest-xdist (>=2)"] - -[[package]] -name = "platformdirs" -version = "2.5.4" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "poetryup" -version = "0.3.15" -description = "Update dependencies and bump their version in the pyproject.toml file" -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - -[package.dependencies] -tomlkit = ">=0.7.2,<0.8.0" - -[[package]] -name = "pycodestyle" -version = "2.7.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pyflakes" -version = "2.3.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pymongo" -version = "3.13.0" -description = "Python driver for MongoDB <http://www.mongodb.org>" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} - -[package.extras] -aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] -gssapi = ["pykerberos"] -ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] -snappy = ["python-snappy"] -srv = ["dnspython (>=1.16.0,<1.17.0)"] -tls = ["ipaddress"] -zstd = ["zstandard"] - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-cov" -version = "2.12.1" -description = "Pytest plugin for measuring coverage." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -coverage = ">=5.2.1" -pytest = ">=4.6" -toml = "*" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "python-dotenv" -version = "0.21.0" -description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." 
-category = "dev" -optional = false -python-versions = ">=3.7, <4" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "resolvelib" -version = "0.9.0" -description = "Resolve abstract dependencies into concrete ones" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -examples = ["html5lib", "packaging", "pygraphviz", "requests"] -lint = ["black", "flake8", "isort", "mypy", "types-requests"] -release = ["build", "towncrier", "twine"] -test = ["commentjson", "packaging", "pytest"] - -[[package]] -name = "rich" -version = "12.6.0" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" -optional = false -python-versions = ">=3.6.3,<4.0.0" - -[package.dependencies] -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] - -[[package]] -name = "setuptools" -version = "65.6.3" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "stevedore" -version = "4.1.1" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tomlkit" -version = "0.7.2" -description = "Style preserving TOML 
library" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "typed-ast" -version = "1.4.3" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "urllib3" -version = "1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "dev" -optional = false -python-versions = "*" - -[metadata] -lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "3362e9488ab16ced56bbc6ce4d38fc36bdc7c7cf11ad73818788a373a486cca6" - -[metadata.files] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -bandit = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = 
"black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, -] -cachecontrol = [ - {file = "CacheControl-0.12.11-py2.py3-none-any.whl", hash = "sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b"}, - {file = "CacheControl-0.12.11.tar.gz", hash = "sha256:a5b9fcc986b184db101aa280b42ecdcdfc524892596f606858e0b7a8b4d9e144"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -commonmark = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] -coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = 
"coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = 
"coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, -] -dnspython = [ - {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, - {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, -] -environs = [ - {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, - {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, -] -exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, -] -flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] -gitdb = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] -gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, -] -html5lib = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = 
"sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -lockfile = [ - {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, - {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, -] -marshmallow = [ - {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, - {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mongo-types = [ - {file = "mongo-types-0.15.1.tar.gz", hash = "sha256:0a9deeb7733ea7da5db3711d92e22d93556b522f860bbff82e5df44c53bd06a9"}, - {file = "mongo_types-0.15.1-py3-none-any.whl", hash = "sha256:9417ae5b9a759c09630b5ec7d66904cc333c2d2fcfe75e2760a332ed5e267309"}, -] -mongoengine = [ - {file = "mongoengine-0.24.2-py3-none-any.whl", hash = "sha256:f5c4e1b206b2ccffe4adc7a6283ed26dd799bd115a5fb1d2e885a075132cdb88"}, - {file = "mongoengine-0.24.2.tar.gz", hash = "sha256:c76d49658575bb995682e2e77c8ef7cda63faf939415b32ee923745d120f8b02"}, -] -msgpack = [ - {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"}, - {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"}, - {file = "msgpack-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db"}, - {file = "msgpack-1.0.4-cp310-cp310-win32.whl", hash = "sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef"}, - {file = "msgpack-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075"}, - {file = "msgpack-1.0.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae"}, - {file = "msgpack-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6"}, - {file = "msgpack-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661"}, - {file = "msgpack-1.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236"}, - {file = "msgpack-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44"}, - {file = "msgpack-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243"}, - {file = "msgpack-1.0.4-cp38-cp38-win32.whl", hash = "sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2"}, - {file = "msgpack-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae"}, - {file = "msgpack-1.0.4-cp39-cp39-win32.whl", hash = "sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c"}, - {file = "msgpack-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce"}, - {file = "msgpack-1.0.4.tar.gz", hash = "sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f"}, -] -mypy = [ - {file = "mypy-0.812-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49"}, - {file = "mypy-0.812-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c"}, - {file = "mypy-0.812-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521"}, - {file = "mypy-0.812-cp35-cp35m-win_amd64.whl", hash = "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb"}, - {file = "mypy-0.812-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a"}, - {file = "mypy-0.812-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c"}, - {file = "mypy-0.812-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6"}, - {file = "mypy-0.812-cp36-cp36m-win_amd64.whl", hash = "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064"}, - {file = "mypy-0.812-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56"}, - {file = 
"mypy-0.812-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8"}, - {file = "mypy-0.812-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7"}, - {file = "mypy-0.812-cp37-cp37m-win_amd64.whl", hash = "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564"}, - {file = "mypy-0.812-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506"}, - {file = "mypy-0.812-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5"}, - {file = "mypy-0.812-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66"}, - {file = "mypy-0.812-cp38-cp38-win_amd64.whl", hash = "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e"}, - {file = "mypy-0.812-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a"}, - {file = "mypy-0.812-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a"}, - {file = "mypy-0.812-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97"}, - {file = "mypy-0.812-cp39-cp39-win_amd64.whl", hash = "sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df"}, - {file = "mypy-0.812-py3-none-any.whl", hash = "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4"}, - {file = "mypy-0.812.tar.gz", hash = "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packageurl-python = [ - {file = "packageurl-python-0.10.4.tar.gz", hash = "sha256:5c91334f942cd55d45eb0c67dd339a535ef90e25f05b9ec016ad188ed0ef9048"}, - {file = "packageurl_python-0.10.4-py3-none-any.whl", hash = "sha256:bf8a1ffe755634776f6563904d792fb0aa13b377fc86115c36fe17f69b6e59db"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, -] -pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, -] -pip = [ - {file = "pip-22.3.1-py3-none-any.whl", hash = "sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077"}, - {file = "pip-22.3.1.tar.gz", hash = "sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38"}, -] -pip-api = [ - {file = "pip-api-0.0.30.tar.gz", hash = "sha256:a05df2c7aa9b7157374bcf4273544201a0c7bae60a9c65bcf84f3959ef3896f3"}, - {file = "pip_api-0.0.30-py3-none-any.whl", hash = 
"sha256:2a0314bd31522eb9ffe8a99668b0d07fee34ebc537931e7b6483001dbedcbdc9"}, -] -pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, -] -pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, -] -platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -poetryup = [ - {file = "poetryup-0.3.15-py3-none-any.whl", hash = "sha256:db068f55d10c0f89c76ea2b62c6bb81c0b0512454f7a83bdc0a13c146e5fb13e"}, - {file = "poetryup-0.3.15.tar.gz", hash = "sha256:efa4e7bb0cd005db4aff3cc678c8bfba9474ef42d5759c0168f2a55fc0f17bc3"}, -] -pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] -pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] -pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pymongo = [ - {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, - {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, - {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, - {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, - {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, - {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, - {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, - {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, - {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, - {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, - {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, - {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, - {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, - {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, - {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, - {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, - {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, - {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, - {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, - {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, - {file = "pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, - {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, - {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, - {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, - {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] -pytest-cov = [ - {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, - {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, -] -python-dotenv = [ - {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, - {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, -] -pyyaml = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = 
"PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = 
"PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -resolvelib = [ - {file = "resolvelib-0.9.0-py2.py3-none-any.whl", hash = "sha256:597adcbdf81d62d0cde55d90faa8e79187ec0f18e5012df30bd7a751b26343ae"}, - {file = "resolvelib-0.9.0.tar.gz", hash = "sha256:40ab05117c3281b1b160105e10075094c5ab118315003c922b77673a365290e1"}, -] -rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, -] -setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -smmap = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] -sortedcontainers = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] -stevedore = [ - {file = 
"stevedore-4.1.1-py3-none-any.whl", hash = "sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"}, - {file = "stevedore-4.1.1.tar.gz", hash = "sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -tomlkit = [ - {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"}, - {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"}, -] -typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, -] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] diff --git a/libs/libcache/poetry.toml b/libs/libcache/poetry.toml deleted file mode 100644 index 5fcef8cd..00000000 --- a/libs/libcache/poetry.toml +++ /dev/null @@ -1,3 +0,0 @@ -[virtualenvs] -in-project = true -prefer-active-python = true diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml deleted file mode 100644 index e657b981..00000000 --- a/libs/libcache/pyproject.toml +++ /dev/null @@ -1,45 +0,0 @@ -[tool.poetry] -authors = ["Sylvain Lesage <[email protected]>"] -description = "Library for the cache in mongodb" -name = "libcache" -version = "0.4.3" -license = "Apache-2.0" - -[tool.poetry.dependencies] -appdirs = "^1.4.4" -environs = "^9.5.0" -mongo-types = "0.15.1" -mongoengine = "^0.24.1" -pymongo = { extras = ["srv"], version = "^3.13.0" } -python = "3.9.6" - -[tool.poetry.group.dev.dependencies] -bandit = "^1.7.0" -black = "^22.1.0" -flake8 = "^3.9.2" -isort = "^5.9.3" -mypy = "0.812" -pip-audit = "^2.4.6" -poetryup = 
"^0.3.8" -pytest = "^7.2.0" -pytest-cov = "^2.12.1" - -[build-system] -build-backend = "poetry.core.masonry.api" -requires = ["poetry-core>=1.0.0"] - -[tool.pytest.ini_options] -filterwarnings = ["ignore::DeprecationWarning"] - -[tool.coverage.run] -source = ["libcache"] - -[tool.isort] -profile = "black" - -[tool.black] -line-length = 119 -preview = true - -[tool.mypy] -strict = true diff --git a/libs/libcache/src/libcache/__init__.py b/libs/libcache/src/libcache/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/libs/libcache/src/libcache/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. diff --git a/libs/libcache/src/libcache/config.py b/libs/libcache/src/libcache/config.py deleted file mode 100644 index 26c2612f..00000000 --- a/libs/libcache/src/libcache/config.py +++ /dev/null @@ -1,28 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from typing import Optional - -from environs import Env - -from libcache.asset import init_assets_dir -from libcache.simple_cache import connect_to_database - - -class CacheConfig: - _assets_directory: Optional[str] - assets_directory: str - mongo_database: str - mongo_url: str - - def __init__(self): - env = Env(expand_vars=True) - with env.prefixed("CACHE_"): - self._assets_directory = env.str(name="ASSETS_DIRECTORY", default=None) - self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_cache") - self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") - self.setup() - - def setup(self): - connect_to_database(database=self.mongo_database, host=self.mongo_url) - self.assets_directory = init_assets_dir(assets_directory=self._assets_directory) diff --git a/libs/libcache/src/libcache/py.typed b/libs/libcache/src/libcache/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/libs/libcache/tests/__init__.py b/libs/libcache/tests/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/libs/libcache/tests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. diff --git a/libs/libcache/tests/conftest.py b/libs/libcache/tests/conftest.py deleted file mode 100644 index 27549e33..00000000 --- a/libs/libcache/tests/conftest.py +++ /dev/null @@ -1,23 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -from pytest import MonkeyPatch, fixture - - -# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 -@fixture(scope="session") -def monkeypatch_session(): - monkeypatch_session = MonkeyPatch() - monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") - yield monkeypatch_session - monkeypatch_session.undo() - - -@fixture(scope="session", autouse=True) -def cache_config(monkeypatch_session: MonkeyPatch) -> CacheConfig: - cache_config = CacheConfig() - if "test" not in cache_config.mongo_database: - raise ValueError("Test must be launched on a test mongo database") - return cache_config diff --git a/libs/libcommon/Makefile b/libs/libcommon/Makefile index 21dc4e83..b2e17e4d 100644 --- a/libs/libcommon/Makefile +++ b/libs/libcommon/Makefile @@ -1,0 +2,3 @@ +export MONGO_PORT := 27020 +export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} +export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT} @@ -4 +7 @@ export COMPOSE_PROJECT_NAME := libcommon -DOCKER_COMPOSE := ../../tools/docker-compose-empty.yml +DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml diff --git a/libs/libcommon/README.md b/libs/libcommon/README.md index b5e9f742..42319f7c 100644 --- a/libs/libcommon/README.md +++ b/libs/libcommon/README.md @@ -3 +3 @@ -A Python library with common code (configuration, utils, logging, exceptions) used by the services and the workers +A Python library with common code (cache, queue, worker logic, processing steps, configuration, utils, logging, exceptions) used by the services and the workers @@ -5 +5 @@ A Python library with common code (configuration, utils, logging, exceptions) us -## Configuration +## Common configuration @@ -7 +7 @@ A Python library with common code (configuration, utils, logging, exceptions) us -Set environment variables to configure the following aspects: +Set the common environment variables to configure the following aspects: @@ -9 +9 @@ Set environment variables to configure the following aspects: -- `COMMON_ASSETS_BASE_URL`: base URL for the assets files. It should be set accordingly to the datasets-server domain, eg https://datasets-server.huggingface.co/assets. Defaults to `assets`. +- `COMMON_ASSETS_BASE_URL`: base URL for the assets files. Set it according to the datasets-server domain, e.g., https://datasets-server.huggingface.co/assets. Defaults to `assets`. @@ -11,2 +11,26 @@ Set environment variables to configure the following aspects: -- `COMMON_HF_TOKEN`: App Access Token (ask moonlanding administrators to get one, only the `read` role is required), to access the gated datasets. Defaults to empty. -- `COMMON_LOG_LEVEL`: log level, among `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. Defaults to `INFO`. +- `COMMON_HF_TOKEN`: App Access Token (ask moonlanding administrators to get one, only the `read` role is required) to access the gated datasets. Defaults to empty. +- `COMMON_LOG_LEVEL`: log level, among `DEBUG`, `INFO`, `WARNING`, `ERROR`, and `CRITICAL`. Defaults to `INFO`. + +## Cache configuration + +Set environment variables to configure the storage of precomputed API responses in a MongoDB database (the "cache"): + +- `CACHE_ASSETS_DIRECTORY`: directory where the asset files are stored. Defaults to empty, which means the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. +- `CACHE_MONGO_DATABASE`: name of the database used for storing the cache. Defaults to `datasets_server_cache`. 
+- `CACHE_MONGO_URL`: URL used to connect to the MongoDB server. Defaults to `mongodb://localhost:27017`. + +## Queue configuration + +Set environment variables to configure the job queues used to precompute API responses. The job queues are stored in a MongoDB database. + +- `QUEUE_MAX_JOBS_PER_NAMESPACE`: maximum number of started jobs for the same namespace (the user or organization before the `/` separator in the dataset name, or the "canonical" dataset name if there is no namespace). Defaults to 1. +- `QUEUE_MONGO_DATABASE`: name of the database used for storing the queue. Defaults to `datasets_server_queue`. +- `QUEUE_MONGO_URL`: URL used to connect to the MongoDB server. Defaults to `mongodb://localhost:27017`. + +## Worker configuration + +Set environment variables to configure the worker that processes the queue. + +- `WORKER_MAX_LOAD_PCT`: maximum machine load allowed to start a job, in percent: the max of the 1m and 5m load averages, divided by the number of CPUs, times 100. Set to 0 to disable the check. Defaults to 70. +- `WORKER_MAX_MEMORY_PCT`: maximum memory (RAM + SWAP) usage of the machine (in percent) allowed to start a job. Set to 0 to disable the check. Defaults to 80. +- `WORKER_SLEEP_SECONDS`: duration in seconds that a worker waits at each loop iteration before checking whether resources are available and processing a pending job, if any. Note that the worker does not sleep on the first loop after finishing a job. Defaults to `15`. diff --git a/libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl index e965d872..8dd8d15d 100644 Binary files a/libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl and b/libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.3.3.tar.gz b/libs/libcommon/dist/libcommon-0.3.3.tar.gz index 93aa6610..b363e0c7 100644 Binary files a/libs/libcommon/dist/libcommon-0.3.3.tar.gz and b/libs/libcommon/dist/libcommon-0.3.3.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.3.4-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.4-py3-none-any.whl new file mode 100644 index 00000000..ad4f6645 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.4-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.3.4.tar.gz b/libs/libcommon/dist/libcommon-0.3.4.tar.gz new file mode 100644 index 00000000..12a032b8 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.4.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.4.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.4.0-py3-none-any.whl new file mode 100644 index 00000000..3d3caa5d Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.0-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.4.0.tar.gz b/libs/libcommon/dist/libcommon-0.4.0.tar.gz new file mode 100644 index 00000000..5b801885 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.0.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.4.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.4.1-py3-none-any.whl new file mode 100644 index 00000000..84ef4ebe Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.1-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.4.1.tar.gz b/libs/libcommon/dist/libcommon-0.4.1.tar.gz new file mode 100644 index 00000000..2f4967b4 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.1.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.4.2-py3-none-any.whl 
b/libs/libcommon/dist/libcommon-0.4.2-py3-none-any.whl new file mode 100644 index 00000000..decc0817 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.2-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.4.2.tar.gz b/libs/libcommon/dist/libcommon-0.4.2.tar.gz new file mode 100644 index 00000000..7d41e1d1 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.2.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.4.3-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.4.3-py3-none-any.whl new file mode 100644 index 00000000..f64d9d60 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.3-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.4.3.tar.gz b/libs/libcommon/dist/libcommon-0.4.3.tar.gz new file mode 100644 index 00000000..870b19e3 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.4.3.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl new file mode 100644 index 00000000..ec1e021a Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.0.tar.gz b/libs/libcommon/dist/libcommon-0.5.0.tar.gz new file mode 100644 index 00000000..ba909997 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.0.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl new file mode 100644 index 00000000..c23d6a99 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.5.1.tar.gz b/libs/libcommon/dist/libcommon-0.5.1.tar.gz new file mode 100644 index 00000000..ba741fee Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.5.1.tar.gz differ diff --git a/libs/libcommon/poetry.lock b/libs/libcommon/poetry.lock index 0ca48d52..7452e963 100644 --- a/libs/libcommon/poetry.lock +++ b/libs/libcommon/poetry.lock @@ -0,0 +1,8 @@ +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" +optional = false +python-versions = "*" + @@ -77 +85 @@ description = "Python package for providing Mozilla's CA Bundle." -category = "dev" +category = "main" @@ -85 +93 @@ description = "The Real First Universal Charset Detector. Open, modern and activ -category = "dev" +category = "main" @@ -107 +115 @@ description = "Cross-platform colored terminal text." -category = "dev" +category = "main" @@ -146,0 +155,12 @@ toml = ">=0.10.0,<0.11.0" +[[package]] +name = "dnspython" +version = "1.16.0" +description = "DNS toolkit" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +dnssec = ["ecdsa (>=0.13)", "pycryptodome"] +idna = ["idna (>=2.1)"] + @@ -175,0 +196,12 @@ test = ["pytest (>=6)"] +[[package]] +name = "filelock" +version = "3.8.0" +description = "A platform independent file lock." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] + @@ -191 +223 @@ name = "gitdb" -version = "4.0.9" +version = "4.0.10" @@ -195 +227 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -228,0 +261,27 @@ lxml = ["lxml"] +[[package]] +name = "huggingface-hub" +version = "0.11.0" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +category = "main" +optional = false +python-versions = ">=3.7.0" + +[package.dependencies] +filelock = "*" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = "*" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +torch = ["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] + @@ -233 +292 @@ description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" +category = "main" @@ -291,0 +351,19 @@ python-versions = "*" +[[package]] +name = "mongo-types" +version = "0.15.1" +description = "Type stubs for mongoengine w/ basic support for bson and pymongo" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[[package]] +name = "mongoengine" +version = "0.24.2" +description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pymongo = ">=3.4,<5.0" + @@ -463,0 +542,11 @@ tomlkit = ">=0.7.2,<0.8.0" +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + @@ -490,0 +580,21 @@ plugins = ["importlib-metadata"] +[[package]] +name = "pymongo" +version = "3.13.0" +description = "Python driver for MongoDB <http://www.mongodb.org>" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} + +[package.extras] +aws = ["pymongo-auth-aws (<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] +gssapi = ["pykerberos"] +ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +snappy = ["python-snappy"] +srv = ["dnspython (>=1.16.0,<1.17.0)"] +tls = ["ipaddress"] +zstd = ["zstandard"] + @@ -553 +663 @@ description = "YAML parser and emitter for Python" -category = "dev" +category = "main" @@ -561 +671 @@ description = "Python HTTP for Humans." -category = "dev" +category = "main" @@ -606 +716 @@ name = "setuptools" -version = "65.6.2" +version = "65.6.3" @@ -675,0 +786,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "tqdm" +version = "4.64.1" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + @@ -683,0 +811,8 @@ python-versions = "*" +[[package]] +name = "types-psutil" +version = "5.9.5.5" +description = "Typing stubs for psutil" +category = "dev" +optional = false +python-versions = "*" + @@ -688 +823 @@ description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" +category = "main" @@ -694 +829 @@ name = "urllib3" -version = "1.26.12" +version = "1.26.13" @@ -696 +831 @@ description = "HTTP library with thread-safe connection pooling, file post, and -category = "dev" +category = "main" @@ -698 +833 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" @@ -716 +851 @@ python-versions = "3.9.6" -content-hash = "b42b88278b98e321afc676b0cc0b1f5feef0237af897a41f78f74432a935ae98" +content-hash = "c41aa118d317c52903d9074626aa783b8a876edc79a4c0a3f619471f175e8c39" @@ -718,0 +854,4 @@ content-hash = "b42b88278b98e321afc676b0cc0b1f5feef0237af897a41f78f74432a935ae98 +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] @@ -829,0 +969,4 @@ cyclonedx-python-lib = [ +dnspython = [ + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, +] @@ -837,0 +981,4 @@ exceptiongroup = [ +filelock = [ + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = "filelock-3.8.0.tar.gz", hash = 
"sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, +] @@ -843,2 +990,2 @@ gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, @@ -853,0 +1001,4 @@ html5lib = [ +huggingface-hub = [ + {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"}, + {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"}, +] @@ -877,0 +1029,8 @@ mccabe = [ +mongo-types = [ + {file = "mongo-types-0.15.1.tar.gz", hash = "sha256:0a9deeb7733ea7da5db3711d92e22d93556b522f860bbff82e5df44c53bd06a9"}, + {file = "mongo_types-0.15.1-py3-none-any.whl", hash = "sha256:9417ae5b9a759c09630b5ec7d66904cc333c2d2fcfe75e2760a332ed5e267309"}, +] +mongoengine = [ + {file = "mongoengine-0.24.2-py3-none-any.whl", hash = "sha256:f5c4e1b206b2ccffe4adc7a6283ed26dd799bd115a5fb1d2e885a075132cdb88"}, + {file = "mongoengine-0.24.2.tar.gz", hash = "sha256:c76d49658575bb995682e2e77c8ef7cda63faf939415b32ee923745d120f8b02"}, +] @@ -1054,0 +1214,16 @@ poetryup = [ +psutil = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = 
"sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] @@ -1066,0 +1242,111 @@ pygments = [ +pymongo = [ + {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, + {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, + {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, + {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, + {file = 
"pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, + {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, + {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, + {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, + {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, + {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, + {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, + {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, + {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, + {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, + {file = 
"pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, + {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, + {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, + {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, + {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, + {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, + {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, + {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, + {file = 
"pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, + {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, + {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, + {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, + {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, +] @@ -1138,2 +1424,2 @@ setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, @@ -1168,0 +1455,4 @@ tomlkit = [ +tqdm = [ + {file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = 
"sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"}, + {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, +] @@ -1200,0 +1491,4 @@ typed-ast = [ +types-psutil = [ + {file = "types-psutil-5.9.5.5.tar.gz", hash = "sha256:4f26fdb2cb064b274cbc6359fba4abf3b3a2993d7d4abc336ad0947568212c62"}, + {file = "types_psutil-5.9.5.5-py3-none-any.whl", hash = "sha256:e576bb81c74f7443b067e94f92435894d5dd561161bec3d6401727b63df009f0"}, +] @@ -1206,2 +1500,2 @@ urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, + {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index e600a8d3..63f049c3 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.3.3" +version = "0.5.1" @@ -8,0 +9 @@ license = "Apache-2.0" +appdirs = "^1.4.4" @@ -9,0 +11,3 @@ environs = "^9.5.0" +huggingface-hub = "^0.11.0" +mongo-types = "0.15.1" +mongoengine = "^0.24.1" @@ -10,0 +15,2 @@ orjson = "^3.6.4" +psutil = "^5.9.2" +pymongo = { extras = ["srv"], version = "^3.13.0" } @@ -22,0 +29 @@ pytest-cov = "^2.12.1" +types-psutil = "^5.9.5" diff --git a/libs/libcache/src/libcache/asset.py b/libs/libcommon/src/libcommon/asset.py similarity index 100% rename from libs/libcache/src/libcache/asset.py rename to libs/libcommon/src/libcommon/asset.py diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py index 75aeda4d..90ea6102 100644 --- a/libs/libcommon/src/libcommon/config.py +++ b/libs/libcommon/src/libcommon/config.py @@ -7,0 +8 @@ from environs import Env +from libcommon.asset import init_assets_dir @@ -8,0 +10,3 @@ from libcommon.log import init_logging +from libcommon.processing_graph import ProcessingGraph, ProcessingGraphSpecification +from libcommon.queue import connect_to_queue_database +from libcommon.simple_cache import connect_to_cache_database @@ -28,0 +33,66 @@ class CommonConfig: + + +class CacheConfig: + _assets_directory: Optional[str] + assets_directory: str + mongo_database: str + mongo_url: str + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("CACHE_"): + self._assets_directory = env.str(name="ASSETS_DIRECTORY", default=None) + self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_cache") + self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") + self.setup() + + def setup(self): + connect_to_cache_database(database=self.mongo_database, host=self.mongo_url) + self.assets_directory = init_assets_dir(assets_directory=self._assets_directory) + + +class QueueConfig: + max_jobs_per_namespace: int + mongo_database: str + mongo_url: str + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("QUEUE_"): + self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_queue") + self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") + self.max_jobs_per_namespace = env.int(name="MAX_JOBS_PER_NAMESPACE", default=1) + self.setup() + + def 
setup(self): + connect_to_queue_database(database=self.mongo_database, host=self.mongo_url) + + +class WorkerConfig: + max_load_pct: int + max_memory_pct: int + sleep_seconds: int + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("WORKER_"): + self.max_load_pct = env.int(name="MAX_LOAD_PCT", default=70) + self.max_memory_pct = env.int(name="MAX_MEMORY_PCT", default=80) + self.sleep_seconds = env.int(name="SLEEP_SECONDS", default=15) + + +class ProcessingGraphConfig: + specification: ProcessingGraphSpecification + graph: ProcessingGraph + + def __init__(self): + # TODO: allow passing the graph via env vars + self.specification = { + "/splits": {"input_type": "dataset", "required_by_dataset_viewer": True}, + "/first-rows": {"input_type": "split", "requires": "/splits", "required_by_dataset_viewer": True}, + } + + self.setup() + + def setup(self): + self.graph = ProcessingGraph(self.specification) diff --git a/libs/libcommon/src/libcommon/dataset.py b/libs/libcommon/src/libcommon/dataset.py new file mode 100644 index 00000000..00d66d9c --- /dev/null +++ b/libs/libcommon/src/libcommon/dataset.py @@ -0,0 +1,223 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from http import HTTPStatus +from typing import Literal, Optional + +import requests +from huggingface_hub.hf_api import DatasetInfo, HfApi, build_hf_headers + +from libcommon.exceptions import CustomError + +DatasetErrorCode = Literal[ + "DatasetNotFoundError", + "GatedDisabledError", + "GatedExtraFieldsError", + "PrivateDatasetError", +] + + +class DatasetError(CustomError): + """Base class for dataset exceptions.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: DatasetErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__( + message=message, status_code=status_code, code=str(code), cause=cause, disclose_cause=disclose_cause + ) + + +class DatasetNotFoundError(DatasetError): + """Raised when the dataset does not exist.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.NOT_FOUND, + code="DatasetNotFoundError", + cause=cause, + disclose_cause=False, + ) + + +class GatedDisabledError(DatasetError): + """Raised when the dataset is gated, but disabled.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.NOT_FOUND, + code="GatedDisabledError", + cause=cause, + disclose_cause=False, + ) + + +class GatedExtraFieldsError(DatasetError): + """Raised when the dataset is gated, with extra fields.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.NOT_FOUND, + code="GatedExtraFieldsError", + cause=cause, + disclose_cause=False, + ) + + +class PrivateDatasetError(DatasetError): + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.NOT_FOUND, + code="PrivateDatasetError", + cause=cause, + disclose_cause=False, + ) + + +def ask_access(dataset: str, hf_endpoint: str, hf_token: Optional[str]) -> None: + """ + Ask access to the dataset repository. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. 
+ hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `None` + Raises: + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. + - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give the sufficient access to the dataset (private, for example). + - ['~requests.exceptions.HTTPError']: any other error when asking access + """ + path = f"{hf_endpoint}/datasets/{dataset}/ask-access" + r = requests.post(path, headers=build_hf_headers(token=hf_token)) + try: + r.raise_for_status() + except requests.exceptions.HTTPError as err: + if r.status_code == 400: + raise GatedExtraFieldsError( + "The dataset is gated with extra fields: not supported at the moment." + ) from err + if r.status_code == 403: + raise GatedDisabledError("The dataset is gated and access is disabled.") from err + if r.status_code in [401, 404]: + raise DatasetNotFoundError("The dataset does not exist on the Hub, or is private.") from err + raise err + + +def get_dataset_info_for_supported_datasets( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, +) -> DatasetInfo: + """ + Get the DatasetInfo of the dataset, after checking if it's supported (no private datasets). + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `DatasetInfo`: the dataset info. + <Tip> + Raises the following errors: + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. + - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give the sufficient access to the dataset (private, for example). + - ['~requests.exceptions.HTTPError']: any other error when asking access + - ['~libcommon.dataset.PrivateDatasetError']: if the dataset is private, since private datasets + are not supported in datasets-server. + </Tip> + """ + ask_access(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) + dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token) + # ^ should not raise, since it would have raised before when reaching ask_access + if dataset_info.private is True: + raise PrivateDatasetError(f"Dataset '{dataset}' is not supported.") + return dataset_info + + +def get_dataset_git_revision( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, +) -> Optional[str]: + """ + Get the git revision of the dataset. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. 
+ hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `Union[str, None]`: the dataset git revision (sha) if any. + <Tip> + Raises the following errors: + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. + - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give the sufficient access to the dataset (private, for example). + - ['~requests.exceptions.HTTPError']: any other error when asking access + - ['~libcommon.dataset.PrivateDatasetError']: if the dataset is private, since private datasets + are not supported in datasets-server. + </Tip> + """ + return get_dataset_info_for_supported_datasets(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token).sha + + +def check_support( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, +) -> None: + """ + Check if the dataset exists on the Hub and is supported by the datasets-server. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `None` + Raises: + - [`~libcommon.dataset.GatedExtraFieldsError`]: if the dataset is gated, with extra fields. + Programmatic access is not implemented for this type of dataset because there is no easy + way to get the list of extra fields. + - [`~libcommon.dataset.GatedDisabledError`]: if the dataset is gated, but disabled. + - [`~libcommon.dataset.DatasetNotFoundError`]: if the dataset does not exist, or if the + token does not give the sufficient access to the dataset (private, for example). + - ['~requests.exceptions.HTTPError']: any other error when asking access + - ['~libcommon.dataset.PrivateDatasetError']: if the dataset is private, since private datasets + are not supported in datasets-server. + """ + get_dataset_info_for_supported_datasets(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) diff --git a/libs/libcommon/src/libcommon/exceptions.py b/libs/libcommon/src/libcommon/exceptions.py index c2db778d..bada9c29 100644 --- a/libs/libcommon/src/libcommon/exceptions.py +++ b/libs/libcommon/src/libcommon/exceptions.py @@ -3,0 +4 @@ +import logging @@ -23 +24,8 @@ ErrorResponse = Union[ErrorResponseWithoutCause, ErrorResponseWithCause] -class CustomError(Exception): +class LoggedError(Exception): + def __init__(self, message: str): + self.message = message + logging.debug(self.message) + super().__init__(self.message) + + +class CustomError(LoggedError): diff --git a/libs/libcommon/src/libcommon/operations.py b/libs/libcommon/src/libcommon/operations.py new file mode 100644 index 00000000..effe1338 --- /dev/null +++ b/libs/libcommon/src/libcommon/operations.py @@ -0,0 +1,157 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import logging +from http import HTTPStatus +from typing import List, Optional + +from libcommon.dataset import check_support +from libcommon.exceptions import LoggedError +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import Queue +from libcommon.simple_cache import DoesNotExist, delete_dataset_responses, get_response + + +class PreviousStepError(LoggedError): + def __init__(self, dataset: str, step: ProcessingStep, config: Optional[str] = None, split: Optional[str] = None): + super().__init__( + f"Response for {step.endpoint} for dataset={dataset}, config={config}, split={split} is an error." + ) + + +def update_dataset( + dataset: str, + init_processing_steps: List[ProcessingStep], + hf_endpoint: str, + hf_token: Optional[str] = None, + force: bool = False, +) -> None: + """ + Update a dataset + + Args: + dataset (str): the dataset + init_processing_steps (List[ProcessingStep]): the processing steps that must be run when updating a dataset + hf_endpoint (str): the HF endpoint + hf_token (Optional[str], optional): The HF token. Defaults to None. + force (bool, optional): Force the update. Defaults to False. + + Returns: None. + + Raises: + - [`~libcommon.dataset.DatasetError`]: if the dataset could not be accessed or is not supported + """ + check_support(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) + logging.debug(f"refresh dataset='{dataset}'") + for init_processing_step in init_processing_steps: + if init_processing_step.input_type == "dataset": + Queue(type=init_processing_step.job_type).add_job(dataset=dataset, force=force) + + +def delete_dataset(dataset: str) -> None: + """ + Delete a dataset + + Args: + dataset (str): the dataset + + Returns: None. + """ + logging.debug(f"delete cache for dataset='{dataset}'") + delete_dataset_responses(dataset=dataset) + + +def move_dataset( + from_dataset: str, + to_dataset: str, + init_processing_steps: List[ProcessingStep], + hf_endpoint: str, + hf_token: Optional[str] = None, + force: bool = False, +) -> None: + """ + Move a dataset + + Note that the implementation is simply to add or update the new dataset, then delete the old one in case of + success. + + Args: + from_dataset (str): the dataset to move + to_dataset (str): the destination dataset + init_processing_steps (List[ProcessingStep]): the processing steps that must be run when updating a dataset + hf_endpoint (str): the HF endpoint + hf_token (Optional[str], optional): The HF token. Defaults to None. + force (bool, optional): Force the update. Defaults to False. + + Returns: None. 
+ + Raises: + - [`~libcommon.dataset.DatasetError`]: if the dataset could not be accessed or is not supported + """ + logging.debug(f"move dataset '{from_dataset}' to '{to_dataset}'") + update_dataset( + dataset=to_dataset, + init_processing_steps=init_processing_steps, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + force=force, + ) + # ^ can raise + delete_dataset(dataset=from_dataset) + + +def check_in_process( + processing_step: ProcessingStep, + init_processing_steps: List[ProcessingStep], + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, + config: Optional[str] = None, + split: Optional[str] = None, +) -> None: + """Checks if the processing step is running + + Args: + processing_step (ProcessingStep): the processing step + init_processing_steps (List[ProcessingStep]): the processing steps that must be run when updating a dataset + dataset (str): the dataset + hf_endpoint (str): the HF endpoint + hf_token (Optional[str], optional): The HF token. Defaults to None. + config (Optional[str], optional): The config, if any. Defaults to None. + split (Optional[str], optional): The split, if any. Defaults to None. + + Returns: None. Does not raise if the processing step is running. + + Raises: + - [`~libcommon.operations.PreviousStepError`]: a previous step has an error + - [`~libcommon.dataset.DatasetError`]: if the dataset could not be accessed or is not supported + """ + all_steps = processing_step.get_ancestors() + [processing_step] + if any( + Queue(type=step.job_type).is_job_in_process(dataset=dataset, config=config, split=split) for step in all_steps + ): + # the processing step, or a previous one, is still being computed + return + for step in processing_step.get_ancestors(): + try: + result = get_response(kind=step.cache_kind, dataset=dataset, config=config, split=split) + except DoesNotExist: + # a previous step has not been computed, update the dataset + update_dataset( + dataset=dataset, + init_processing_steps=init_processing_steps, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + ) + return + if result["http_status"] != HTTPStatus.OK: + raise PreviousStepError(dataset=dataset, config=config, split=split, step=step) + # all the dependencies (if any) have been computed successfully, the processing step should be in process + # if the dataset is supported. Check if it is supported and update it if so. + update_dataset( + dataset=dataset, + init_processing_steps=init_processing_steps, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + ) + return diff --git a/libs/libcommon/src/libcommon/processing_graph.py b/libs/libcommon/src/libcommon/processing_graph.py new file mode 100644 index 00000000..2e09d7bf --- /dev/null +++ b/libs/libcommon/src/libcommon/processing_graph.py @@ -0,0 +1,127 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from __future__ import annotations + +from dataclasses import dataclass +from typing import List, Literal, Mapping, Optional, TypedDict + +InputType = Literal["dataset", "split"] + + +class _ProcessingStepSpecification(TypedDict): + input_type: InputType + + +class ProcessingStepSpecification(_ProcessingStepSpecification, total=False): + requires: Optional[str] + required_by_dataset_viewer: Literal[True] + + +@dataclass +class ProcessingStep: + """A dataset processing step. + + It contains the details of: + - the API endpoint + - the cache kind (ie. the key in the cache) + - the job type (ie. 
the job to run to compute the response) + - the job parameters (mainly: ['dataset'] or ['dataset', 'config', 'split']) + - the immediately previous step required to compute the response + - the list of all the previous steps required to compute the response + - the next steps (the steps which previous step is the current one) + """ + + endpoint: str + input_type: InputType + requires: Optional[str] + required_by_dataset_viewer: bool + parent: Optional[ProcessingStep] + ancestors: List[ProcessingStep] + children: List[ProcessingStep] + + @property + def job_type(self): + """The job type (ie. the job to run to compute the response).""" + return self.endpoint + + @property + def cache_kind(self): + """The cache kind (ie. the key in the cache).""" + return self.endpoint + + def get_ancestors(self) -> List[ProcessingStep]: + """Get all the ancestors previous steps required to compute the response of the given step.""" + if len(self.ancestors) > 0: + return self.ancestors + if self.parent is None: + self.ancestors = [] + else: + parent_ancestors = self.parent.get_ancestors() + if self in parent_ancestors: + raise ValueError(f"Cycle detected between {self.endpoint} and {self.parent.endpoint}") + self.ancestors = parent_ancestors + [self.parent] + return self.ancestors + + +ProcessingGraphSpecification = Mapping[str, ProcessingStepSpecification] + + +class ProcessingGraph: + """A graph of dataset processing steps. + + For now, the steps can have only one parent (immediate previous step), but can have multiple children + (next steps, found automatically by traversing the graph). + The graph can have multiple roots. + + It contains the details of: + - the index of all the steps, identified by their endpoint + - the first step, or roots: they don't have a previous step. This means that they will be computed first when a + dataset is updated. 
+ """ + + steps: Mapping[str, ProcessingStep] + roots: List[ProcessingStep] + required_by_dataset_viewer: List[ProcessingStep] + + def __init__(self, processing_graph_specification: ProcessingGraphSpecification): + # TODO: validate the graph specification: endpoints must start with "/" and use only lowercase letters + self.steps = { + endpoint: ProcessingStep( + endpoint=endpoint, + input_type=specification["input_type"], + requires=specification.get("requires"), + required_by_dataset_viewer=specification.get("required_by_dataset_viewer", False), + parent=None, + ancestors=[], + children=[], + ) + for endpoint, specification in processing_graph_specification.items() + } + self.setup() + + def setup(self) -> None: + """Setup the graph.""" + for step in self.steps.values(): + # Set the parent and the children + if step.requires: + step.parent = self.get_step(step.requires) + step.parent.children.append(step) + # Set the ancestors (allows to check for cycles) + step.get_ancestors() + self.roots = [step for step in self.steps.values() if step.parent is None] + self.required_by_dataset_viewer = [step for step in self.steps.values() if step.required_by_dataset_viewer] + + def get_step(self, endpoint: str) -> ProcessingStep: + """Get a step by its endpoint.""" + if endpoint not in self.steps: + raise ValueError(f"Unknown endpoint: {endpoint}") + return self.steps[endpoint] + + def get_first_steps(self) -> List[ProcessingStep]: + """Get the first steps.""" + return self.roots + + def get_steps_required_by_dataset_viewer(self) -> List[ProcessingStep]: + """Get the steps required by the dataset viewer.""" + return self.required_by_dataset_viewer diff --git a/libs/libqueue/src/libqueue/queue.py b/libs/libcommon/src/libcommon/queue.py similarity index 99% rename from libs/libqueue/src/libqueue/queue.py rename to libs/libcommon/src/libcommon/queue.py index c056d341..c4ea39be 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libcommon/src/libcommon/queue.py @@ -90 +90 @@ def get_datetime() -> datetime: -def connect_to_database(database: str, host: str) -> None: +def connect_to_queue_database(database: str, host: str) -> None: diff --git a/libs/libcache/src/libcache/simple_cache.py b/libs/libcommon/src/libcommon/simple_cache.py similarity index 96% rename from libs/libcache/src/libcache/simple_cache.py rename to libs/libcommon/src/libcommon/simple_cache.py index ec529df0..8da700d8 100644 --- a/libs/libcache/src/libcache/simple_cache.py +++ b/libs/libcommon/src/libcommon/simple_cache.py @@ -7 +7 @@ from http import HTTPStatus -from typing import Dict, Generic, List, Optional, Set, Type, TypedDict, TypeVar +from typing import Any, Generic, List, Mapping, Optional, Set, Type, TypedDict, TypeVar @@ -41 +41 @@ class QuerySetManager(Generic[U]): -def connect_to_database(database: str, host: str) -> None: +def connect_to_cache_database(database: str, host: str) -> None: @@ -109 +109 @@ def upsert_response( - content: Dict, + content: Mapping[str, Any], @@ -114 +114 @@ def upsert_response( - details: Optional[Dict] = None, + details: Optional[Mapping[str, Any]] = None, @@ -164 +164 @@ class CacheEntry(CacheEntryWithoutContent): - content: Dict + content: Mapping[str, Any] @@ -206 +206 @@ def get_valid_datasets(kind: str) -> Set[str]: -def get_validity_by_kind(dataset: str) -> Dict[str, bool]: +def get_validity_by_kind(dataset: str) -> Mapping[str, bool]: @@ -291 +291 @@ def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheRepo - - [`~libcache.simple_cache.InvalidCursor`] + - 
[`~libcommon.simple_cache.InvalidCursor`] @@ -293 +293 @@ def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheRepo - - [`~libcache.simple_cache.InvalidLimit`] + - [`~libcommon.simple_cache.InvalidLimit`] diff --git a/libs/libcommon/src/libcommon/worker.py b/libs/libcommon/src/libcommon/worker.py new file mode 100644 index 00000000..fe4cb621 --- /dev/null +++ b/libs/libcommon/src/libcommon/worker.py @@ -0,0 +1,344 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +import random +import time +from abc import ABC, abstractmethod +from http import HTTPStatus +from typing import Any, Literal, Mapping, Optional + +from packaging import version +from psutil import cpu_count, getloadavg, swap_memory, virtual_memory + +from libcommon.config import CommonConfig, QueueConfig, WorkerConfig +from libcommon.dataset import DatasetNotFoundError, get_dataset_git_revision +from libcommon.exceptions import CustomError +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import EmptyQueueError, Queue, Status +from libcommon.simple_cache import get_response_without_content, upsert_response + + +def parse_version(string_version: str) -> version.Version: + parsed_version = version.parse(string_version) + if isinstance(parsed_version, version.LegacyVersion): + raise ValueError(f"LegacyVersion is not supported: {parsed_version}") + return parsed_version + + +WorkerErrorCode = Literal[ + "ConfigNotFoundError", + "NoGitRevisionError", + "SplitNotFoundError", + "UnexpectedError", +] + + +class WorkerError(CustomError): + """Base class for worker exceptions.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: WorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__( + message=message, status_code=status_code, code=str(code), cause=cause, disclose_cause=disclose_cause + ) + + +class ConfigNotFoundError(WorkerError): + """Raised when the config does not exist.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.NOT_FOUND, + code="ConfigNotFoundError", + cause=cause, + disclose_cause=False, + ) + + +class SplitNotFoundError(WorkerError): + """Raised when the split does not exist.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.NOT_FOUND, + code="SplitNotFoundError", + cause=cause, + disclose_cause=False, + ) + + +class NoGitRevisionError(WorkerError): + """Raised when the git revision returned by huggingface_hub is None.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.NOT_FOUND, + code="NoGitRevisionError", + cause=cause, + disclose_cause=False, + ) + + +class UnexpectedError(WorkerError): + """Raised when an unexpected error occurred while computing the response.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__( + message=message, + status_code=HTTPStatus.INTERNAL_SERVER_ERROR, + code="UnexpectedError", + cause=cause, + disclose_cause=False, + ) + + +class Worker(ABC): + processing_step: ProcessingStep + queue: Queue + common_config: CommonConfig + queue_config: QueueConfig + worker_config: WorkerConfig + version: str + + def __init__( + self, + processing_step: ProcessingStep, + common_config: CommonConfig, +
queue_config: QueueConfig, + worker_config: WorkerConfig, + version: str, + ) -> None: + self.processing_step = processing_step + self.common_config = common_config + self.queue_config = queue_config + self.worker_config = worker_config + self.version = version + self.setup() + + def setup(self) -> None: + self.queue = Queue( + type=self.processing_step.job_type, max_jobs_per_namespace=self.queue_config.max_jobs_per_namespace + ) + + def log(self, level: int, msg: str) -> None: + logging.log(level=level, msg=f"[{self.processing_step.endpoint}] {msg}") + + def debug(self, msg: str) -> None: + self.log(level=logging.DEBUG, msg=msg) + + def info(self, msg: str) -> None: + self.log(level=logging.INFO, msg=msg) + + def critical(self, msg: str) -> None: + self.log(level=logging.CRITICAL, msg=msg) + + def exception(self, msg: str) -> None: + self.log(level=logging.ERROR, msg=msg) + + def has_memory(self) -> bool: + if self.worker_config.max_memory_pct <= 0: + return True + virtual_memory_used: int = virtual_memory().used # type: ignore + virtual_memory_total: int = virtual_memory().total # type: ignore + percent = 100 * (swap_memory().used + virtual_memory_used) / (swap_memory().total + virtual_memory_total) + ok = percent < self.worker_config.max_memory_pct + if not ok: + self.info( + f"memory usage (RAM + SWAP) is too high: {percent:.0f}% - max is {self.worker_config.max_memory_pct}%" + ) + return ok + + def has_cpu(self) -> bool: + if self.worker_config.max_load_pct <= 0: + return True + load_pct = max(getloadavg()[:2]) / cpu_count() * 100 + # ^ only current load and 5m load. 15m load is not relevant to decide to launch a new job + ok = load_pct < self.worker_config.max_load_pct + if not ok: + self.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.worker_config.max_load_pct}%") + return ok + + def sleep(self) -> None: + jitter = 0.75 + random.random() / 2 # nosec + # ^ between 0.75 and 1.25 + duration = self.worker_config.sleep_seconds * jitter + self.debug(f"sleep for {duration:.2f} seconds") + time.sleep(duration) + + def loop(self) -> None: + try: + while True: + if self.has_memory() and self.has_cpu() and self.process_next_job(): + # loop immediately to try another job + # see https://github.com/huggingface/datasets-server/issues/265 + continue + self.sleep() + except BaseException as e: + self.critical(f"quit due to an uncaught error while processing the job: {e}") + raise + + def process_next_job(self) -> bool: + self.debug("try to process a job") + + try: + started_job_info = self.queue.start_job() + job_id = started_job_info["job_id"] + dataset = started_job_info["dataset"] + config = started_job_info["config"] + split = started_job_info["split"] + force = started_job_info["force"] + parameters_for_log = f"dataset={dataset}" + ("" if split is None else f" config={config} split={split}") + self.debug(f"job assigned: {job_id} for {parameters_for_log}") + except EmptyQueueError: + self.debug("no job in the queue") + return False + + finished_status: Literal[Status.SUCCESS, Status.ERROR, Status.SKIPPED] + try: + self.info(f"compute {parameters_for_log}") + if self.should_skip_job(dataset=dataset, config=config, split=split, force=force): + finished_status = Status.SKIPPED + else: + self.process(dataset=dataset, config=config, split=split, force=force) + finished_status = Status.SUCCESS + except Exception: + self.exception(f"error while computing {parameters_for_log}") + finished_status = Status.ERROR + finally: + self.queue.finish_job(job_id=job_id, finished_status=finished_status) +
self.debug(f"job finished with {finished_status.value}: {job_id} for {parameters_for_log}") + return True + + def compare_major_version(self, other_version: str) -> int: + """ + Compare the major version of worker's self version and the other version's. + + Args: + other_version (:obj:`str`): the other semantic version + + Returns: + :obj:`int`: the difference between the major version of both versions. + 0 if they are equal. Negative if worker's major version is lower than other_version, positive otherwise. + Raises: + :obj:`ValueError`: if worker's version or other_version is not a valid semantic version. + """ + try: + return parse_version(self.version).major - parse_version(other_version).major + except Exception as err: + raise RuntimeError(f"Could not get major versions: {err}") from err + + def should_skip_job( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> bool: + """Return True if the job should be skipped, False otherwise. + + The job must be skipped if: + - force is False + - and a cache entry exists for the dataset + - and the result was successful + - and it has been created with the same major version of the worker + - and it has been created with the exact same git commit of the dataset repository + + Args: + dataset (:obj:`str`): The name of the dataset. + config (:obj:`str`, `optional`): The name of the configuration. + split (:obj:`str`, `optional`): The name of the split. + force (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to force the job to be run. + + Returns: + :obj:`bool`: True if the job should be skipped, False otherwise. + """ + if force or config is None or split is None: + return False + try: + cached_response = get_response_without_content( + kind=self.processing_step.cache_kind, dataset=dataset, config=config, split=split + ) + dataset_git_revision = get_dataset_git_revision( + dataset=dataset, hf_endpoint=self.common_config.hf_endpoint, hf_token=self.common_config.hf_token + ) + return ( + # TODO: use "error_code" to decide if the job should be skipped (ex: retry if temporary error) + cached_response["http_status"] == HTTPStatus.OK + and cached_response["worker_version"] is not None + and self.compare_major_version(cached_response["worker_version"]) == 0 + and cached_response["dataset_git_revision"] is not None + and cached_response["dataset_git_revision"] == dataset_git_revision + ) + except Exception: + return False + + def process( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + force: bool = False, + ) -> bool: + dataset_git_revision = None + try: + dataset_git_revision = get_dataset_git_revision( + dataset=dataset, hf_endpoint=self.common_config.hf_endpoint, hf_token=self.common_config.hf_token + ) + if dataset_git_revision is None: + self.debug(f"the dataset={dataset} has no git revision, don't update the cache") + raise NoGitRevisionError(f"Could not get git revision for dataset {dataset}") + content = self.compute(dataset=dataset, config=config, split=split, force=force) + upsert_response( + kind=self.processing_step.cache_kind, + dataset=dataset, + config=config, + split=split, + content=content, + http_status=HTTPStatus.OK, + worker_version=self.version, + dataset_git_revision=dataset_git_revision, + ) + self.debug(f"dataset={dataset} config={config} split={split} is valid, cache updated") + return True + except ( + DatasetNotFoundError, + ConfigNotFoundError, + SplitNotFoundError, + ): + # To avoid filling the cache, we don't save 
these errors. Otherwise, DoS is possible. + self.debug( + f"the dataset={dataset}, config {config} or split {split} could not be found, don't update the cache" + ) + return False + except Exception as err: + e = err if isinstance(err, CustomError) else UnexpectedError(str(err), err) + upsert_response( + kind=self.processing_step.cache_kind, + dataset=dataset, + config=config, + split=split, + content=dict(e.as_response()), + http_status=e.status_code, + error_code=e.code, + details=dict(e.as_response_with_cause()), + worker_version=self.version, + dataset_git_revision=dataset_git_revision, + ) + self.debug(f"response for dataset={dataset} config={config} split={split} had an error, cache updated") + return False + + @abstractmethod + def compute( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + force: bool = False, + ) -> Mapping[str, Any]: + pass diff --git a/libs/libcommon/tests/conftest.py b/libs/libcommon/tests/conftest.py index 404c08b6..204ccd2e 100644 --- a/libs/libcommon/tests/conftest.py +++ b/libs/libcommon/tests/conftest.py @@ -4 +4 @@ -import pytest +from pytest import MonkeyPatch, fixture @@ -6 +6,2 @@ import pytest -from libcommon.config import CommonConfig +from libcommon.config import CacheConfig, CommonConfig, QueueConfig, WorkerConfig +from libcommon.processing_graph import ProcessingStep @@ -9 +10 @@ from libcommon.config import CommonConfig [email protected](scope="session") +@fixture(scope="session") @@ -11,0 +13,44 @@ def common_config(): + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + yield monkeypatch_session + monkeypatch_session.undo() + + +@fixture(scope="session", autouse=True) +def cache_config(monkeypatch_session: MonkeyPatch) -> CacheConfig: + cache_config = CacheConfig() + if "test" not in cache_config.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return cache_config + + +@fixture(scope="session", autouse=True) +def queue_config(monkeypatch_session: MonkeyPatch) -> QueueConfig: + queue_config = QueueConfig() + if "test" not in queue_config.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return queue_config + + +@fixture(scope="session", autouse=True) +def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: + return WorkerConfig() + + +@fixture(scope="session") +def test_processing_step(monkeypatch_session: MonkeyPatch) -> ProcessingStep: + return ProcessingStep( + endpoint="/test", + input_type="dataset", + requires=None, + required_by_dataset_viewer=False, + parent=None, + ancestors=[], + children=[], + ) diff --git a/libs/libcommon/tests/test_processing_steps.py b/libs/libcommon/tests/test_processing_steps.py new file mode 100644 index 00000000..3489cbde --- /dev/null +++ b/libs/libcommon/tests/test_processing_steps.py @@ -0,0 +1,27 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from libcommon.config import ProcessingGraphConfig + + +def test_default_graph(): + config = ProcessingGraphConfig() + graph = config.graph + + splits = graph.get_step("/splits") + first_rows = graph.get_step("/first-rows") + + assert splits is not None + assert first_rows is not None + + assert splits.parent is None + assert first_rows.parent is splits + + assert splits.children == [first_rows] + assert first_rows.children == [] + + assert splits.get_ancestors() == [] + assert first_rows.get_ancestors() == [splits] + + assert graph.get_first_steps() == [splits] + assert graph.get_steps_required_by_dataset_viewer() == [splits, first_rows] diff --git a/libs/libqueue/tests/test_queue.py b/libs/libcommon/tests/test_queue.py similarity index 98% rename from libs/libqueue/tests/test_queue.py rename to libs/libcommon/tests/test_queue.py index 26d771ed..d985c1fc 100644 --- a/libs/libqueue/tests/test_queue.py +++ b/libs/libcommon/tests/test_queue.py @@ -8 +8 @@ import pytest -from libqueue.queue import EmptyQueueError, Queue, Status, _clean_queue_database +from libcommon.queue import EmptyQueueError, Queue, Status, _clean_queue_database diff --git a/libs/libcache/tests/test_simple_cache.py b/libs/libcommon/tests/test_simple_cache.py similarity index 99% rename from libs/libcache/tests/test_simple_cache.py rename to libs/libcommon/tests/test_simple_cache.py index 1e8751de..e11459e3 100644 --- a/libs/libcache/tests/test_simple_cache.py +++ b/libs/libcommon/tests/test_simple_cache.py @@ -11 +11 @@ from pymongo.errors import DocumentTooLarge -from libcache.simple_cache import ( +from libcommon.simple_cache import ( diff --git a/libs/libcommon/tests/test_worker.py b/libs/libcommon/tests/test_worker.py new file mode 100644 index 00000000..86702cb2 --- /dev/null +++ b/libs/libcommon/tests/test_worker.py @@ -0,0 +1,85 @@ +from typing import Any, Mapping, Optional + +import pytest + +from libcommon.config import CommonConfig, QueueConfig, WorkerConfig +from libcommon.processing_graph import ProcessingStep +from libcommon.worker import Worker, parse_version + + +class DummyWorker(Worker): + def compute( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> Mapping[str, Any]: + return {"key": "value"} + + [email protected]( + "string_version, expected_major_version, should_raise", + [ + ("1.0.0", 1, False), + ("3.1.2", 3, False), + ("1.1", 1, False), + ("not a version", None, True), + ], +) +def test_parse_version(string_version: str, expected_major_version: int, should_raise: bool) -> None: + if should_raise: + with pytest.raises(Exception): + parse_version(string_version) + else: + assert parse_version(string_version).major == expected_major_version + + [email protected]( + "worker_version, other_version, expected, should_raise", + [ + ("1.0.0", "1.0.1", 0, False), + ("1.0.0", "2.0.1", -1, False), + ("2.0.0", "1.0.1", 1, False), + ("not a version", "1.0.1", None, True), + ], +) +def test_compare_major_version( + test_processing_step: ProcessingStep, + common_config: CommonConfig, + queue_config: QueueConfig, + worker_config: WorkerConfig, + worker_version: str, + other_version: str, + expected: int, + should_raise: bool, +) -> None: + worker = DummyWorker( + processing_step=test_processing_step, + common_config=common_config, + queue_config=queue_config, + worker_config=worker_config, + version=worker_version, + ) + if should_raise: + with pytest.raises(Exception): + worker.compare_major_version(other_version) + else: + assert 
worker.compare_major_version(other_version) == expected + + +def should_skip_job( + hub_public_csv: str, + test_processing_step: ProcessingStep, + common_config: CommonConfig, + queue_config: QueueConfig, + worker_config: WorkerConfig, +) -> None: + worker = DummyWorker( + processing_step=test_processing_step, + common_config=common_config, + queue_config=queue_config, + worker_config=worker_config, + version="1.0.0", + ) + dataset = hub_public_csv + assert worker.should_skip_job(dataset=dataset) is False + # we add an entry to the cache + worker.process(dataset=dataset) + assert worker.should_skip_job(dataset=dataset) is True diff --git a/libs/libqueue/.flake8 b/libs/libqueue/.flake8 deleted file mode 100644 index f7d6157c..00000000 --- a/libs/libqueue/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -# Recommend matching the black line length (119), -# rather than using the flake8 default of 79: -max-line-length = 119 -extend-ignore = "E203" diff --git a/libs/libqueue/.python-version b/libs/libqueue/.python-version deleted file mode 100644 index 1635d0f5..00000000 --- a/libs/libqueue/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.9.6 diff --git a/libs/libqueue/Makefile b/libs/libqueue/Makefile deleted file mode 100644 index d7bb6a5d..00000000 --- a/libs/libqueue/Makefile +++ /dev/null @@ -1,11 +0,0 @@ -# environment variables for the commands (docker-compose, poetry) -export COMPOSE_PROJECT_NAME := libqueue -export MONGO_PORT := 27021 -export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT} -# makefile variables -DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml - -include ../../tools/Python.mk -include ../../tools/PythonAudit.mk -include ../../tools/PythonTest.mk -include ../../tools/Docker.mk diff --git a/libs/libqueue/README.md b/libs/libqueue/README.md deleted file mode 100644 index 4f00eb04..00000000 --- a/libs/libqueue/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# libqueue - -A Python library to manage the job queues to precompute API responses. The job queues are stored in a mongo database. - -## Configuration - -Set environment variables to configure the following aspects: - -- `QUEUE_MAX_JOBS_PER_NAMESPACE`: the maximum number of started jobs for the same namespace (the user or organization, before the `/` separator in the dataset name, or the "canonical" dataset name if not present). Defaults to 1. -- `QUEUE_MAX_LOAD_PCT`: the maximum load of the machine (in percentage: the max between the 1m load and the 5m load divided by the number of cpus \*100) allowed to start a job. Set to 0 to disable the test. Defaults to 70. -- `QUEUE_MAX_MEMORY_PCT`: the maximum memory (RAM + SWAP) usage of the machine (in percentage) allowed to start a job. Set to 0 to disable the test. Defaults to 80. -- `QUEUE_MONGO_DATABASE`: the name of the database used for storing the queue. Defaults to `"datasets_server_queue"`. -- `QUEUE_MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. -- `QUEUE_SLEEP_SECONDS`: duration in seconds of a worker wait loop iteration, before checking if resources are available and processing a job if any is available. Note that the worker does not sleep on the first loop after finishing a job. Defaults to `15`. 
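The deleted libqueue README above documented the queue and worker environment variables; after this refactoring they are read by the new `QueueConfig` and `WorkerConfig` classes in `libcommon.config` (see the config.py hunk earlier in this commit), with the machine-resource and sleep settings apparently moving from the `QUEUE_` prefix to `WORKER_` while keeping the same defaults. Below is a minimal sketch of that mapping, not part of the diff, assuming the libcommon version introduced here is installed and a local MongoDB is reachable (instantiating `QueueConfig` connects to the queue database in its `setup()`):

```python
# Hypothetical usage sketch: map the env vars from the deleted libqueue README
# onto the new libcommon config classes added in this commit.
import os

os.environ["QUEUE_MONGO_URL"] = "mongodb://localhost:27017"
os.environ["QUEUE_MONGO_DATABASE"] = "datasets_server_queue"
os.environ["QUEUE_MAX_JOBS_PER_NAMESPACE"] = "2"
os.environ["WORKER_MAX_LOAD_PCT"] = "70"    # was QUEUE_MAX_LOAD_PCT
os.environ["WORKER_MAX_MEMORY_PCT"] = "80"  # was QUEUE_MAX_MEMORY_PCT
os.environ["WORKER_SLEEP_SECONDS"] = "15"   # was QUEUE_SLEEP_SECONDS

from libcommon.config import QueueConfig, WorkerConfig

queue_config = QueueConfig()    # reads QUEUE_* and connects to the queue database
worker_config = WorkerConfig()  # reads WORKER_*; no database connection
assert queue_config.max_jobs_per_namespace == 2
assert worker_config.sleep_seconds == 15
```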
diff --git a/libs/libqueue/dist/libqueue-0.1.0-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.0-py3-none-any.whl deleted file mode 100644 index 7d1e5540..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.0.tar.gz b/libs/libqueue/dist/libqueue-0.1.0.tar.gz deleted file mode 100644 index ec0794bf..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.0.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.1-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.1-py3-none-any.whl deleted file mode 100644 index c825f310..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.1.tar.gz b/libs/libqueue/dist/libqueue-0.1.1.tar.gz deleted file mode 100644 index 30d8406a..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.1.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.10-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.10-py3-none-any.whl deleted file mode 100644 index 26f147e6..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.10-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.10.tar.gz b/libs/libqueue/dist/libqueue-0.1.10.tar.gz deleted file mode 100644 index e19bb91b..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.10.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.11-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.11-py3-none-any.whl deleted file mode 100644 index 1fd43552..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.11-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.11.tar.gz b/libs/libqueue/dist/libqueue-0.1.11.tar.gz deleted file mode 100644 index 790fecbf..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.11.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.2-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.2-py3-none-any.whl deleted file mode 100644 index c8d411ee..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.2.tar.gz b/libs/libqueue/dist/libqueue-0.1.2.tar.gz deleted file mode 100644 index ea9f0aeb..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.2.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.3-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.3-py3-none-any.whl deleted file mode 100644 index 3431ba2f..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.3-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.3.tar.gz b/libs/libqueue/dist/libqueue-0.1.3.tar.gz deleted file mode 100644 index 83c0549b..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.3.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.4-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.4-py3-none-any.whl deleted file mode 100644 index d6508b4c..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.4-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.4.tar.gz b/libs/libqueue/dist/libqueue-0.1.4.tar.gz deleted file mode 100644 index ccd655f0..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.4.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.5-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.5-py3-none-any.whl deleted file mode 100644 index 95de0194..00000000 Binary files 
a/libs/libqueue/dist/libqueue-0.1.5-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.5.tar.gz b/libs/libqueue/dist/libqueue-0.1.5.tar.gz deleted file mode 100644 index 02c1793e..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.5.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.6-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.6-py3-none-any.whl deleted file mode 100644 index 72b52e4b..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.6-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.6.tar.gz b/libs/libqueue/dist/libqueue-0.1.6.tar.gz deleted file mode 100644 index d97f3795..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.6.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.7-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.7-py3-none-any.whl deleted file mode 100644 index e7b52949..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.7-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.7.tar.gz b/libs/libqueue/dist/libqueue-0.1.7.tar.gz deleted file mode 100644 index 6c40d134..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.7.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.8-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.8-py3-none-any.whl deleted file mode 100644 index 4e862649..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.8-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.8.tar.gz b/libs/libqueue/dist/libqueue-0.1.8.tar.gz deleted file mode 100644 index 6f6daab1..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.8.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.9-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.1.9-py3-none-any.whl deleted file mode 100644 index 666e222f..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.9-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.1.9.tar.gz b/libs/libqueue/dist/libqueue-0.1.9.tar.gz deleted file mode 100644 index 096a6816..00000000 Binary files a/libs/libqueue/dist/libqueue-0.1.9.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.2.0-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.2.0-py3-none-any.whl deleted file mode 100644 index 16fbf1c6..00000000 Binary files a/libs/libqueue/dist/libqueue-0.2.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.2.0.tar.gz b/libs/libqueue/dist/libqueue-0.2.0.tar.gz deleted file mode 100644 index 497c3bae..00000000 Binary files a/libs/libqueue/dist/libqueue-0.2.0.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.3.0-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.3.0-py3-none-any.whl deleted file mode 100644 index fac72ba5..00000000 Binary files a/libs/libqueue/dist/libqueue-0.3.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.3.0.tar.gz b/libs/libqueue/dist/libqueue-0.3.0.tar.gz deleted file mode 100644 index d9bb70e8..00000000 Binary files a/libs/libqueue/dist/libqueue-0.3.0.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl deleted file mode 100644 index 02516f1f..00000000 Binary files a/libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.3.1.tar.gz b/libs/libqueue/dist/libqueue-0.3.1.tar.gz deleted file mode 100644 index 
f9eb0076..00000000 Binary files a/libs/libqueue/dist/libqueue-0.3.1.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.3.2-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.3.2-py3-none-any.whl deleted file mode 100644 index cabb1360..00000000 Binary files a/libs/libqueue/dist/libqueue-0.3.2-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.3.2.tar.gz b/libs/libqueue/dist/libqueue-0.3.2.tar.gz deleted file mode 100644 index 1367e3ba..00000000 Binary files a/libs/libqueue/dist/libqueue-0.3.2.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.0-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.0-py3-none-any.whl deleted file mode 100644 index 9e8e9a71..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.0-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.0.tar.gz b/libs/libqueue/dist/libqueue-0.4.0.tar.gz deleted file mode 100644 index d2010c6c..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.0.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl deleted file mode 100644 index 5fda0253..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.1.tar.gz b/libs/libqueue/dist/libqueue-0.4.1.tar.gz deleted file mode 100644 index df179d6f..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.1.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl deleted file mode 100644 index a0de00e7..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.10.tar.gz b/libs/libqueue/dist/libqueue-0.4.10.tar.gz deleted file mode 100644 index a2ea9a22..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.10.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl deleted file mode 100644 index 38a06c0b..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.11.tar.gz b/libs/libqueue/dist/libqueue-0.4.11.tar.gz deleted file mode 100644 index faf31fa8..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.11.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.12-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.12-py3-none-any.whl deleted file mode 100644 index 8f614c62..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.12-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.12.tar.gz b/libs/libqueue/dist/libqueue-0.4.12.tar.gz deleted file mode 100644 index ee8d73cb..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.12.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl deleted file mode 100644 index 6bee9b1f..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.13.tar.gz b/libs/libqueue/dist/libqueue-0.4.13.tar.gz deleted file mode 100644 index 8303ca84..00000000 Binary files a/libs/libqueue/dist/libqueue-0.4.13.tar.gz and /dev/null differ diff --git a/libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl 
b/libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl
deleted file mode 100644
index 7a905795..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.2.tar.gz b/libs/libqueue/dist/libqueue-0.4.2.tar.gz
deleted file mode 100644
index b4baf64d..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.2.tar.gz and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.3-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.3-py3-none-any.whl
deleted file mode 100644
index c182b934..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.3-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.3.tar.gz b/libs/libqueue/dist/libqueue-0.4.3.tar.gz
deleted file mode 100644
index a4ad1859..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.3.tar.gz and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.4-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.4-py3-none-any.whl
deleted file mode 100644
index 9604b84f..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.4-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.4.tar.gz b/libs/libqueue/dist/libqueue-0.4.4.tar.gz
deleted file mode 100644
index 4ceaf592..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.4.tar.gz and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.5-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.5-py3-none-any.whl
deleted file mode 100644
index 3366aa31..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.5-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.5.tar.gz b/libs/libqueue/dist/libqueue-0.4.5.tar.gz
deleted file mode 100644
index 9c35a07e..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.5.tar.gz and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl
deleted file mode 100644
index 98fe99d9..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.6.tar.gz b/libs/libqueue/dist/libqueue-0.4.6.tar.gz
deleted file mode 100644
index b8660a45..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.6.tar.gz and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl
deleted file mode 100644
index 871c198f..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.7.tar.gz b/libs/libqueue/dist/libqueue-0.4.7.tar.gz
deleted file mode 100644
index 06f35d3f..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.7.tar.gz and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl
deleted file mode 100644
index c3f41386..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.8.tar.gz b/libs/libqueue/dist/libqueue-0.4.8.tar.gz
deleted file mode 100644
index 5853480c..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.8.tar.gz and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.9-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.9-py3-none-any.whl
deleted file mode 100644
index 014af2e9..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.9-py3-none-any.whl and /dev/null differ
diff --git a/libs/libqueue/dist/libqueue-0.4.9.tar.gz b/libs/libqueue/dist/libqueue-0.4.9.tar.gz
deleted file mode 100644
index f5ad48d6..00000000
Binary files a/libs/libqueue/dist/libqueue-0.4.9.tar.gz and /dev/null differ
diff --git a/libs/libqueue/poetry.lock b/libs/libqueue/poetry.lock
deleted file mode 100644
index 428e4308..00000000
--- a/libs/libqueue/poetry.lock
+++ /dev/null
@@ -1,1367 +0,0 @@
-[[package]]
-name = "attrs"
-version = "22.1.0"
-description = "Classes Without Boilerplate"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-
-[package.extras]
-dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
-docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
-tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
-
-[[package]]
-name = "bandit"
-version = "1.7.4"
-description = "Security oriented static analyser for python code."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
-GitPython = ">=1.0.1"
-PyYAML = ">=5.3.1"
-stevedore = ">=1.20.0"
-
-[package.extras]
-test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"]
-toml = ["toml"]
-yaml = ["PyYAML"]
-
-[[package]]
-name = "black"
-version = "22.10.0"
-description = "The uncompromising code formatter."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
-typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
-[[package]]
-name = "cachecontrol"
-version = "0.12.11"
-description = "httplib2 caching for requests"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-lockfile = {version = ">=0.9", optional = true, markers = "extra == \"filecache\""}
-msgpack = ">=0.5.2"
-requests = "*"
-
-[package.extras]
-filecache = ["lockfile (>=0.9)"]
-redis = ["redis (>=2.10.5)"]
-
-[[package]]
-name = "certifi"
-version = "2022.9.24"
-description = "Python package for providing Mozilla's CA Bundle."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "charset-normalizer"
-version = "2.1.1"
-description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-category = "dev"
-optional = false
-python-versions = ">=3.6.0"
-
-[package.extras]
-unicode-backport = ["unicodedata2"]
-
-[[package]]
-name = "click"
-version = "8.1.3"
-description = "Composable command line interface toolkit"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-
-[[package]]
-name = "commonmark"
-version = "0.9.1"
-description = "Python parser for the CommonMark Markdown spec"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.extras]
-test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
-
-[[package]]
-name = "coverage"
-version = "6.5.0"
-description = "Code coverage measurement for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-toml = ["tomli"]
-
-[[package]]
-name = "cyclonedx-python-lib"
-version = "3.1.0"
-description = "A library for producing CycloneDX SBOM (Software Bill of Materials) files."
-category = "dev"
-optional = false
-python-versions = ">=3.6,<4.0"
-
-[package.dependencies]
-packageurl-python = ">=0.9"
-setuptools = ">=47.0.0"
-sortedcontainers = ">=2.4.0,<3.0.0"
-toml = ">=0.10.0,<0.11.0"
-
-[[package]]
-name = "dnspython"
-version = "1.16.0"
-description = "DNS toolkit"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.extras]
-dnssec = ["ecdsa (>=0.13)", "pycryptodome"]
-idna = ["idna (>=2.1)"]
-
-[[package]]
-name = "environs"
-version = "9.5.0"
-description = "simplified environment variable parsing"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-marshmallow = ">=3.0.0"
-python-dotenv = "*"
-
-[package.extras]
-dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"]
-django = ["dj-database-url", "dj-email-url", "django-cache-url"]
-lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"]
-tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"]
-
-[[package]]
-name = "exceptiongroup"
-version = "1.0.4"
-description = "Backport of PEP 654 (exception groups)"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-test = ["pytest (>=6)"]
-
-[[package]]
-name = "flake8"
-version = "3.9.2"
-description = "the modular source code checker: pep8 pyflakes and co"
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-
-[package.dependencies]
-mccabe = ">=0.6.0,<0.7.0"
-pycodestyle = ">=2.7.0,<2.8.0"
-pyflakes = ">=2.3.0,<2.4.0"
-
-[[package]]
-name = "gitdb"
-version = "4.0.10"
-description = "Git Object Database"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-smmap = ">=3.0.1,<6"
-
-[[package]]
-name = "gitpython"
-version = "3.1.29"
-description = "GitPython is a python library used to interact with Git repositories"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-gitdb = ">=4.0.1,<5"
-
-[[package]]
-name = "html5lib"
-version = "1.1"
-description = "HTML parser based on the WHATWG HTML specification"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[package.dependencies]
-six = ">=1.9"
-webencodings = "*"
-
-[package.extras]
-all = ["chardet (>=2.2)", "genshi", "lxml"]
-chardet = ["chardet (>=2.2)"]
-genshi = ["genshi"]
-lxml = ["lxml"]
-
-[[package]]
-name = "idna"
-version = "3.4"
-description = "Internationalized Domain Names in Applications (IDNA)"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "iniconfig"
-version = "1.1.1"
-description = "iniconfig: brain-dead simple config-ini parsing"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "isort"
-version = "5.10.1"
-description = "A Python utility / library to sort Python imports."
-category = "dev"
-optional = false
-python-versions = ">=3.6.1,<4.0"
-
-[package.extras]
-colors = ["colorama (>=0.4.3,<0.5.0)"]
-pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
-plugins = ["setuptools"]
-requirements-deprecated-finder = ["pip-api", "pipreqs"]
-
-[[package]]
-name = "lockfile"
-version = "0.12.2"
-description = "Platform-independent file locking module"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "marshmallow"
-version = "3.19.0"
-description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-packaging = ">=17.0"
-
-[package.extras]
-dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"]
-tests = ["pytest", "pytz", "simplejson"]
-
-[[package]]
-name = "mccabe"
-version = "0.6.1"
-description = "McCabe checker, plugin for flake8"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "mongo-types"
-version = "0.15.1"
-description = "Type stubs for mongoengine w/ basic support for bson and pymongo"
-category = "main"
-optional = false
-python-versions = ">=3.7,<4.0"
-
-[[package]]
-name = "mongoengine"
-version = "0.24.2"
-description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pymongo = ">=3.4,<5.0"
-
-[[package]]
-name = "msgpack"
-version = "1.0.4"
-description = "MessagePack serializer"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "mypy"
-version = "0.812"
-description = "Optional static typing for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-
-[package.dependencies]
-mypy-extensions = ">=0.4.3,<0.5.0"
-typed-ast = ">=1.4.0,<1.5.0"
-typing-extensions = ">=3.7.4"
-
-[package.extras]
-dmypy = ["psutil (>=4.0)"]
-
-[[package]]
-name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "packageurl-python"
-version = "0.10.4"
-description = "A purl aka. Package URL parser and builder"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-build = ["wheel"]
-test = ["black", "isort", "pytest"]
-
-[[package]]
-name = "packaging"
-version = "21.3"
-description = "Core utilities for Python packages"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
-
-[[package]]
-name = "pathspec"
-version = "0.10.2"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "pbr"
-version = "5.11.0"
-description = "Python Build Reasonableness"
-category = "dev"
-optional = false
-python-versions = ">=2.6"
-
-[[package]]
-name = "pip"
-version = "22.3.1"
-description = "The PyPA recommended tool for installing Python packages."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "pip-api"
-version = "0.0.30"
-description = "An unofficial, importable pip API"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-pip = "*"
-
-[[package]]
-name = "pip-audit"
-version = "2.4.6"
-description = "A tool for scanning Python environments for known vulnerabilities"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-CacheControl = {version = ">=0.12.10", extras = ["filecache"]}
-cyclonedx-python-lib = ">=2.0.0,<2.5.0 || >2.5.0"
-html5lib = ">=1.1"
-packaging = ">=21.0.0"
-pip-api = ">=0.0.28"
-pip-requirements-parser = ">=31.2.0"
-resolvelib = ">=0.8.0"
-rich = ">=12.4"
-toml = ">=0.10"
-
-[package.extras]
-dev = ["build", "bump (>=1.3.2)", "pip-audit[lint,test]"]
-lint = ["black (>=22.3.0)", "flake8", "interrogate", "isort", "mypy", "pdoc3", "types-html5lib", "types-requests", "types-toml"]
-test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"]
-
-[[package]]
-name = "pip-requirements-parser"
-version = "31.2.0"
-description = "pip requirements parser - a mostly correct pip requirements parsing library because it uses pip's own code."
-category = "dev"
-optional = false
-python-versions = ">=3.6.*"
-
-[package.dependencies]
-packaging = "*"
-
-[package.extras]
-docs = ["Sphinx (>=3.3.1)", "doc8 (>=0.8.1)", "sphinx-rtd-theme (>=0.5.0)"]
-testing = ["pytest (>=6)", "pytest-xdist (>=2)"]
-
-[[package]]
-name = "platformdirs"
-version = "2.5.4"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"]
-test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
-
-[[package]]
-name = "pluggy"
-version = "1.0.0"
-description = "plugin and hook calling mechanisms for python"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "poetryup"
-version = "0.3.15"
-description = "Update dependencies and bump their version in the pyproject.toml file"
-category = "dev"
-optional = false
-python-versions = ">=3.6,<4.0"
-
-[package.dependencies]
-tomlkit = ">=0.7.2,<0.8.0"
-
-[[package]]
-name = "psutil"
-version = "5.9.4"
-description = "Cross-platform lib for process and system monitoring in Python."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.extras]
-test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
-
-[[package]]
-name = "pycodestyle"
-version = "2.7.0"
-description = "Python style guide checker"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[[package]]
-name = "pyflakes"
-version = "2.3.1"
-description = "passive checker of Python programs"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[[package]]
-name = "pygments"
-version = "2.13.0"
-description = "Pygments is a syntax highlighting package written in Python."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-plugins = ["importlib-metadata"]
-
-[[package]]
-name = "pymongo"
-version = "3.13.0"
-description = "Python driver for MongoDB <http://www.mongodb.org>"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""}
-
-[package.extras]
-aws = ["pymongo-auth-aws (<2.0.0)"]
-encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"]
-gssapi = ["pykerberos"]
-ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
-snappy = ["python-snappy"]
-srv = ["dnspython (>=1.16.0,<1.17.0)"]
-tls = ["ipaddress"]
-zstd = ["zstandard"]
-
-[[package]]
-name = "pyparsing"
-version = "3.0.9"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-category = "main"
-optional = false
-python-versions = ">=3.6.8"
-
-[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
-
-[[package]]
-name = "pytest"
-version = "7.2.0"
-description = "pytest: simple powerful testing with Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-attrs = ">=19.2.0"
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=0.12,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
-
-[package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
-
-[[package]]
-name = "pytest-cov"
-version = "2.12.1"
-description = "Pytest plugin for measuring coverage."
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[package.dependencies]
-coverage = ">=5.2.1"
-pytest = ">=4.6"
-toml = "*"
-
-[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
-
-[[package]]
-name = "python-dotenv"
-version = "0.21.0"
-description = "Read key-value pairs from a .env file and set them as environment variables"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-cli = ["click (>=5.0)"]
-
-[[package]]
-name = "pyyaml"
-version = "6.0"
-description = "YAML parser and emitter for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "requests"
-version = "2.28.1"
-description = "Python HTTP for Humans."
-category = "dev"
-optional = false
-python-versions = ">=3.7, <4"
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<3"
-idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<1.27"
-
-[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-
-[[package]]
-name = "resolvelib"
-version = "0.9.0"
-description = "Resolve abstract dependencies into concrete ones"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.extras]
-examples = ["html5lib", "packaging", "pygraphviz", "requests"]
-lint = ["black", "flake8", "isort", "mypy", "types-requests"]
-release = ["build", "towncrier", "twine"]
-test = ["commentjson", "packaging", "pytest"]
-
-[[package]]
-name = "rich"
-version = "12.6.0"
-description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-category = "dev"
-optional = false
-python-versions = ">=3.6.3,<4.0.0"
-
-[package.dependencies]
-commonmark = ">=0.9.0,<0.10.0"
-pygments = ">=2.6.0,<3.0.0"
-
-[package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
-
-[[package]]
-name = "setuptools"
-version = "65.6.3"
-description = "Easily download, build, install, upgrade, and uninstall Python packages"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
-
-[[package]]
-name = "six"
-version = "1.16.0"
-description = "Python 2 and 3 compatibility utilities"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-
-[[package]]
-name = "smmap"
-version = "5.0.0"
-description = "A pure Python implementation of a sliding window memory map manager"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "sortedcontainers"
-version = "2.4.0"
-description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "stevedore"
-version = "4.1.1"
-description = "Manage dynamic plugins for Python applications"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-
-[package.dependencies]
-pbr = ">=2.0.0,<2.1.0 || >2.1.0"
-
-[[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
-category = "dev"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-
-[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "tomlkit"
-version = "0.7.2"
-description = "Style preserving TOML
library" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "typed-ast" -version = "1.4.3" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-psutil" -version = "5.9.5.5" -description = "Typing stubs for psutil" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "urllib3" -version = "1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "dev" -optional = false -python-versions = "*" - -[metadata] -lock-version = "1.1" -python-versions = "3.9.6" -content-hash = "03e5ffad91f3c5ecb6155f80e69e77d6fc5a77497f47056361ddf900f2236f59" - -[metadata.files] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -bandit = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, -] -cachecontrol = [ - {file = "CacheControl-0.12.11-py2.py3-none-any.whl", hash = "sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b"}, - {file = "CacheControl-0.12.11.tar.gz", hash = "sha256:a5b9fcc986b184db101aa280b42ecdcdfc524892596f606858e0b7a8b4d9e144"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -commonmark = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] -coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = 
"coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -cyclonedx-python-lib = [ - {file = "cyclonedx-python-lib-3.1.0.tar.gz", hash = "sha256:39e9d36347d4dc736474ab4f3a7cd7bc91050c9315df698f83a6d8bbcb290744"}, - {file = "cyclonedx_python_lib-3.1.0-py3-none-any.whl", hash = "sha256:3c79f32bb7d6ed34eac3308dbc8f2a77fbd1fd3779991173a147d866eaa7423e"}, -] -dnspython = [ - {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, - {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, -] -environs = [ - {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, - {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, -] -exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, -] -flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] -gitdb = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] -gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, -] -html5lib = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -lockfile = [ - 
{file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, - {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, -] -marshmallow = [ - {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, - {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mongo-types = [ - {file = "mongo-types-0.15.1.tar.gz", hash = "sha256:0a9deeb7733ea7da5db3711d92e22d93556b522f860bbff82e5df44c53bd06a9"}, - {file = "mongo_types-0.15.1-py3-none-any.whl", hash = "sha256:9417ae5b9a759c09630b5ec7d66904cc333c2d2fcfe75e2760a332ed5e267309"}, -] -mongoengine = [ - {file = "mongoengine-0.24.2-py3-none-any.whl", hash = "sha256:f5c4e1b206b2ccffe4adc7a6283ed26dd799bd115a5fb1d2e885a075132cdb88"}, - {file = "mongoengine-0.24.2.tar.gz", hash = "sha256:c76d49658575bb995682e2e77c8ef7cda63faf939415b32ee923745d120f8b02"}, -] -msgpack = [ - {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"}, - {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"}, - {file = "msgpack-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa"}, - {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e"}, - {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db"}, - {file = "msgpack-1.0.4-cp310-cp310-win32.whl", hash = "sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef"}, - {file = "msgpack-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075"}, - {file = "msgpack-1.0.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9"}, - {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9"}, - {file = 
"msgpack-1.0.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6"}, - {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae"}, - {file = "msgpack-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6"}, - {file = "msgpack-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661"}, - {file = "msgpack-1.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227"}, - {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e"}, - {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236"}, - {file = "msgpack-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44"}, - {file = "msgpack-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab"}, - {file = "msgpack-1.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e"}, - {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e"}, - {file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43"}, - {file = 
"msgpack-1.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243"}, - {file = "msgpack-1.0.4-cp38-cp38-win32.whl", hash = "sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2"}, - {file = "msgpack-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55"}, - {file = "msgpack-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92"}, - {file = "msgpack-1.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8"}, - {file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae"}, - {file = "msgpack-1.0.4-cp39-cp39-win32.whl", hash = "sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c"}, - {file = "msgpack-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce"}, - {file = "msgpack-1.0.4.tar.gz", hash = "sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f"}, -] -mypy = [ - {file = "mypy-0.812-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49"}, - {file = "mypy-0.812-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c"}, - {file = "mypy-0.812-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521"}, - {file = "mypy-0.812-cp35-cp35m-win_amd64.whl", hash = "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb"}, - {file = "mypy-0.812-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a"}, - {file = "mypy-0.812-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c"}, - {file = "mypy-0.812-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6"}, - {file = "mypy-0.812-cp36-cp36m-win_amd64.whl", hash = "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064"}, - {file = "mypy-0.812-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56"}, - {file = "mypy-0.812-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8"}, - {file = "mypy-0.812-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7"}, - {file = "mypy-0.812-cp37-cp37m-win_amd64.whl", hash = "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564"}, - {file = "mypy-0.812-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506"}, - {file = "mypy-0.812-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5"}, - {file = "mypy-0.812-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66"}, - {file = "mypy-0.812-cp38-cp38-win_amd64.whl", hash = "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e"}, - {file = "mypy-0.812-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a"}, - {file = "mypy-0.812-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a"}, - {file = "mypy-0.812-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97"}, - {file = "mypy-0.812-cp39-cp39-win_amd64.whl", hash = "sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df"}, - {file = "mypy-0.812-py3-none-any.whl", hash = "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4"}, - {file = "mypy-0.812.tar.gz", hash = "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packageurl-python = [ - {file = "packageurl-python-0.10.4.tar.gz", hash = "sha256:5c91334f942cd55d45eb0c67dd339a535ef90e25f05b9ec016ad188ed0ef9048"}, - {file = "packageurl_python-0.10.4-py3-none-any.whl", hash = "sha256:bf8a1ffe755634776f6563904d792fb0aa13b377fc86115c36fe17f69b6e59db"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, -] -pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, -] -pip = [ - {file = "pip-22.3.1-py3-none-any.whl", hash = "sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077"}, - {file = "pip-22.3.1.tar.gz", hash = "sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38"}, -] -pip-api = [ - {file = "pip-api-0.0.30.tar.gz", hash = "sha256:a05df2c7aa9b7157374bcf4273544201a0c7bae60a9c65bcf84f3959ef3896f3"}, - {file = "pip_api-0.0.30-py3-none-any.whl", hash = 
"sha256:2a0314bd31522eb9ffe8a99668b0d07fee34ebc537931e7b6483001dbedcbdc9"}, -] -pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, -] -pip-requirements-parser = [ - {file = "pip-requirements-parser-31.2.0.tar.gz", hash = "sha256:8c2a6f8e091ac2693824a5ef4e3b250226e34f74a20a91a87b9ab0714b47788f"}, - {file = "pip_requirements_parser-31.2.0-py3-none-any.whl", hash = "sha256:22fa213a987913385b2484d5698ecfa1d9cf4154978cdf929085548af55355b0"}, -] -platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -poetryup = [ - {file = "poetryup-0.3.15-py3-none-any.whl", hash = "sha256:db068f55d10c0f89c76ea2b62c6bb81c0b0512454f7a83bdc0a13c146e5fb13e"}, - {file = "poetryup-0.3.15.tar.gz", hash = "sha256:efa4e7bb0cd005db4aff3cc678c8bfba9474ef42d5759c0168f2a55fc0f17bc3"}, -] -psutil = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, -] -pycodestyle = [ - {file = 
"pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] -pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] -pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pymongo = [ - {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, - {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, - {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, - {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, - {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, - {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, - {file = 
"pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, - {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, - {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, - {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, - {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, - {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, - {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, - {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, - {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, - {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, - {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, - {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, - {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = 
"sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, - {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, - {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, - {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, - {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = 
"sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, - {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, - {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, - {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, - {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, - {file = 
"pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, - {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, - {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, - {file = "pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, - {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, - {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, - {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, - {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, - {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, -] -pyparsing = [ - {file = 
"pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] -pytest-cov = [ - {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, - {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, -] -python-dotenv = [ - {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, - {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, -] -pyyaml = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = 
"PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - 
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -resolvelib = [ - {file = "resolvelib-0.9.0-py2.py3-none-any.whl", hash = "sha256:597adcbdf81d62d0cde55d90faa8e79187ec0f18e5012df30bd7a751b26343ae"}, - {file = "resolvelib-0.9.0.tar.gz", hash = "sha256:40ab05117c3281b1b160105e10075094c5ab118315003c922b77673a365290e1"}, -] -rich = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, -] -setuptools = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -smmap = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] -sortedcontainers = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] -stevedore = [ - {file = "stevedore-4.1.1-py3-none-any.whl", hash = "sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"}, - {file = "stevedore-4.1.1.tar.gz", hash = "sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -tomlkit = [ - {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"}, - {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"}, -] -typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = 
"sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, -] -types-psutil = [ - {file = "types-psutil-5.9.5.5.tar.gz", hash = "sha256:4f26fdb2cb064b274cbc6359fba4abf3b3a2993d7d4abc336ad0947568212c62"}, - {file = "types_psutil-5.9.5.5-py3-none-any.whl", hash = "sha256:e576bb81c74f7443b067e94f92435894d5dd561161bec3d6401727b63df009f0"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -urllib3 = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, -] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] diff --git a/libs/libqueue/poetry.toml b/libs/libqueue/poetry.toml deleted file mode 100644 index 5fcef8cd..00000000 --- a/libs/libqueue/poetry.toml +++ /dev/null @@ -1,3 +0,0 @@ -[virtualenvs] -in-project = true -prefer-active-python = true diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml deleted file mode 100644 index ee0aa673..00000000 --- a/libs/libqueue/pyproject.toml +++ /dev/null @@ -1,51 +0,0 @@ -[tool.poetry] -authors = ["Sylvain Lesage <[email protected]>"] -description = "Library for the jobs queue in mongodb" -name = "libqueue" -version = "0.4.13" -license = "Apache-2.0" - -[tool.poetry.dependencies] -environs = "^9.5.0" -mongo-types = "0.15.1" -mongoengine = "^0.24.1" -packaging = "^21.3" -psutil = "^5.9.2" -pymongo = { extras = ["srv"], version = "^3.13.0" } -python = "3.9.6" - -[tool.poetry.group.dev.dependencies] -bandit = "^1.7.0" -black = "^22.1.0" -flake8 = "^3.9.2" -isort = "^5.9.3" -mypy = "0.812" -pip-audit = "^2.4.6" -poetryup = "^0.3.8" -pytest = "^7.2.0" -pytest-cov = "^2.12.1" -types-psutil = "^5.9.5" - -[build-system] -build-backend = "poetry.core.masonry.api" -requires = ["poetry-core>=1.0.0"] - -[tool.pytest.ini_options] -# addopts = "-k 'wip'" -filterwarnings = ["ignore::DeprecationWarning"] -markers = [ - "wip: tests being developed" -] - -[tool.coverage.run] -source = ["libqueue"] - -[tool.isort] -profile = "black" - -[tool.black] -line-length = 119 -preview = true - -[tool.mypy] -strict = true diff --git a/libs/libqueue/src/libqueue/__init__.py b/libs/libqueue/src/libqueue/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/libs/libqueue/src/libqueue/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. diff --git a/libs/libqueue/src/libqueue/config.py b/libs/libqueue/src/libqueue/config.py deleted file mode 100644 index 7be56966..00000000 --- a/libs/libqueue/src/libqueue/config.py +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
-
-from environs import Env
-
-from libqueue.queue import connect_to_database
-
-
-class QueueConfig:
-    max_jobs_per_namespace: int
-    max_load_pct: int
-    max_memory_pct: int
-    mongo_database: str
-    mongo_url: str
-    sleep_seconds: int
-
-    def __init__(self):
-        env = Env(expand_vars=True)
-        with env.prefixed("QUEUE_"):
-            self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_queue")
-            self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017")
-            self.max_jobs_per_namespace = env.int(name="MAX_JOBS_PER_NAMESPACE", default=1)
-            self.max_load_pct = env.int(name="MAX_LOAD_PCT", default=70)
-            self.max_memory_pct = env.int(name="MAX_MEMORY_PCT", default=80)
-            self.sleep_seconds = env.int(name="SLEEP_SECONDS", default=15)
-        self.setup()
-
-    def setup(self):
-        connect_to_database(database=self.mongo_database, host=self.mongo_url)
diff --git a/libs/libqueue/src/libqueue/py.typed b/libs/libqueue/src/libqueue/py.typed
deleted file mode 100644
index e69de29b..00000000
diff --git a/libs/libqueue/src/libqueue/worker.py b/libs/libqueue/src/libqueue/worker.py
deleted file mode 100644
index 4154132a..00000000
--- a/libs/libqueue/src/libqueue/worker.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 The HuggingFace Authors.
-
-import logging
-import random
-import time
-from abc import ABC, abstractmethod
-from typing import Literal, Optional
-
-from packaging import version
-from psutil import cpu_count, getloadavg, swap_memory, virtual_memory
-
-from libqueue.config import QueueConfig
-from libqueue.queue import EmptyQueueError, Queue, Status
-
-
-def parse_version(string_version: str) -> version.Version:
-    parsed_version = version.parse(string_version)
-    if isinstance(parsed_version, version.LegacyVersion):
-        raise ValueError(f"LegacyVersion is not supported: {parsed_version}")
-    return parsed_version
-
-
-class Worker(ABC):
-    queue_config: QueueConfig
-    version: str
-
-    @property
-    @abstractmethod
-    def queue(self) -> Queue:
-        pass
-
-    def __init__(self, queue_config: QueueConfig, version: str) -> None:
-        self.queue_config = queue_config
-        self.version = version
-
-    def has_memory(self) -> bool:
-        if self.queue_config.max_memory_pct <= 0:
-            return True
-        virtual_memory_used: int = virtual_memory().used  # type: ignore
-        virtual_memory_total: int = virtual_memory().total  # type: ignore
-        percent = (swap_memory().used + virtual_memory_used) / (swap_memory().total + virtual_memory_total)
-        ok = percent < self.queue_config.max_memory_pct
-        if not ok:
-            logging.info(
-                f"memory usage (RAM + SWAP) is too high: {percent:.0f}% - max is {self.queue_config.max_memory_pct}%"
-            )
-        return ok
-
-    def has_cpu(self) -> bool:
-        if self.queue_config.max_load_pct <= 0:
-            return True
-        load_pct = max(getloadavg()[:2]) / cpu_count() * 100
-        # ^ only current load and 5m load. 15m load is not relevant to decide to launch a new job
-        ok = load_pct < self.queue_config.max_load_pct
-        if not ok:
-            logging.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.queue_config.max_load_pct}%")
-        return ok
-
-    def sleep(self) -> None:
-        jitter = 0.75 + random.random() / 2  # nosec
-        # ^ between 0.75 and 1.25
-        duration = self.queue_config.sleep_seconds * jitter
-        logging.debug(f"sleep during {duration:.2f} seconds")
-        time.sleep(duration)
-
-    def loop(self) -> None:
-        try:
-            while True:
-                if self.has_memory() and self.has_cpu() and self.process_next_job():
-                    # loop immediately to try another job
-                    # see https://github.com/huggingface/datasets-server/issues/265
-                    continue
-                self.sleep()
-        except BaseException as e:
-            logging.critical(f"quit due to an uncaught error while processing the job: {e}")
-            raise
-
-    def process_next_job(self) -> bool:
-        logging.debug("try to process a job")
-
-        try:
-            started_job_info = self.queue.start_job()
-            job_id = started_job_info["job_id"]
-            dataset = started_job_info["dataset"]
-            config = started_job_info["config"]
-            split = started_job_info["split"]
-            force = started_job_info["force"]
-            parameters_for_log = "dataset={dataset}" + ("" if split is None else f"config={config} split={split}")
-            logging.debug(f"job assigned: {job_id} for {parameters_for_log}")
-        except EmptyQueueError:
-            logging.debug("no job in the queue")
-            return False
-
-        finished_status: Literal[Status.SUCCESS, Status.ERROR, Status.SKIPPED]
-        try:
-            logging.info(f"compute {parameters_for_log}")
-            finished_status = (
-                Status.SKIPPED
-                if self.should_skip_job(dataset=dataset, config=config, split=split, force=force)
-                else Status.SUCCESS
-                if self.compute(dataset=dataset, config=config, split=split, force=force)
-                else Status.ERROR
-            )
-        except Exception:
-            logging.exception(f"error while computing {parameters_for_log}")
-            finished_status = Status.ERROR
-        finally:
-            self.queue.finish_job(job_id=job_id, finished_status=finished_status)
-            logging.debug(f"job finished with {finished_status.value}: {job_id} for {parameters_for_log}")
-        return True
-
-    def compare_major_version(self, other_version: str) -> int:
-        """
-        Compare the major version of worker's self version and the other version's.
-
-        Args:
-            other_version (:obj:`str`): the other semantic version
-
-        Returns:
-            :obj:`int`: the difference between the major version of both versions.
-            0 if they are equal. Negative if worker's major version is lower than other_version, positive otherwise.
-        Raises:
-            :obj:`ValueError`: if worker's version or other_version is not a valid semantic version.
-        """
-        try:
-            return parse_version(self.version).major - parse_version(other_version).major
-        except Exception as err:
-            raise RuntimeError(f"Could not get major versions: {err}") from err
-
-    @abstractmethod
-    def should_skip_job(
-        self,
-        dataset: str,
-        config: Optional[str] = None,
-        split: Optional[str] = None,
-        force: bool = False,
-    ) -> bool:
-        pass
-
-    @abstractmethod
-    def compute(
-        self,
-        dataset: str,
-        config: Optional[str] = None,
-        split: Optional[str] = None,
-        force: bool = False,
-    ) -> bool:
-        pass
diff --git a/libs/libqueue/tests/__init__.py b/libs/libqueue/tests/__init__.py
deleted file mode 100644
index 1e9d0c5a..00000000
--- a/libs/libqueue/tests/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 The HuggingFace Authors.
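One detail of the removed `Worker.sleep` above is worth spelling out: the polling delay is multiplied by a random factor so that several workers sharing the same queue drift apart instead of waking in lockstep. A minimal, self-contained sketch of that behavior; the `jittered_sleep` helper name and the example value are illustrative, while the formula is copied verbatim from the removed code:

```python
import logging
import random
import time


def jittered_sleep(base_seconds: float) -> None:
    # Scale the configured duration by a factor in [0.75, 1.25),
    # exactly as in the deleted Worker.sleep above.
    jitter = 0.75 + random.random() / 2  # nosec
    duration = base_seconds * jitter
    logging.debug(f"sleep during {duration:.2f} seconds")
    time.sleep(duration)


# QUEUE_SLEEP_SECONDS defaults to 15 in the removed QueueConfig
jittered_sleep(15)
```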
diff --git a/libs/libqueue/tests/conftest.py b/libs/libqueue/tests/conftest.py deleted file mode 100644 index f38337bc..00000000 --- a/libs/libqueue/tests/conftest.py +++ /dev/null @@ -1,23 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from pytest import MonkeyPatch, fixture - -from libqueue.config import QueueConfig - - -# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 -@fixture(scope="session") -def monkeypatch_session(): - monkeypatch_session = MonkeyPatch() - monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") - yield monkeypatch_session - monkeypatch_session.undo() - - -@fixture(scope="session", autouse=True) -def queue_config(monkeypatch_session: MonkeyPatch) -> QueueConfig: - queue_config = QueueConfig() - if "test" not in queue_config.mongo_database: - raise ValueError("Test must be launched on a test mongo database") - return queue_config diff --git a/libs/libqueue/tests/test_worker.py b/libs/libqueue/tests/test_worker.py deleted file mode 100644 index ebb53db7..00000000 --- a/libs/libqueue/tests/test_worker.py +++ /dev/null @@ -1,40 +0,0 @@ -import pytest - -from libqueue.worker import parse_version - -from .utils import DummyWorker - - [email protected]( - "string_version, expected_major_version, should_raise", - [ - ("1.0.0", 1, False), - ("3.1.2", 3, False), - ("1.1", 1, False), - ("not a version", None, True), - ], -) -def test_parse_version(string_version: str, expected_major_version: int, should_raise: bool) -> None: - if should_raise: - with pytest.raises(Exception): - parse_version(string_version) - else: - assert parse_version(string_version).major == expected_major_version - - [email protected]( - "worker_version, other_version, expected, should_raise", - [ - ("1.0.0", "1.0.1", 0, False), - ("1.0.0", "2.0.1", -1, False), - ("2.0.0", "1.0.1", 1, False), - ("not a version", "1.0.1", None, True), - ], -) -def test_compare_major_version(worker_version: str, other_version: str, expected: int, should_raise: bool) -> None: - worker = DummyWorker(version=worker_version) - if should_raise: - with pytest.raises(Exception): - worker.compare_major_version(other_version) - else: - assert worker.compare_major_version(other_version) == expected diff --git a/libs/libqueue/tests/utils.py b/libs/libqueue/tests/utils.py deleted file mode 100644 index 46298eb2..00000000 --- a/libs/libqueue/tests/utils.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Optional - -from libqueue.config import QueueConfig -from libqueue.queue import Queue -from libqueue.worker import Worker - - -class DummyWorker(Worker): - def __init__(self, version: str): - super().__init__(queue_config=QueueConfig(), version=version) - - @property - def queue(self) -> Queue: - return Queue("queue_type") - - def should_skip_job( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> bool: - return super().should_skip_job(dataset=dataset, config=config, split=split, force=force) - - def compute( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> bool: - return super().compute(dataset=dataset, config=config, split=split, force=force) diff --git a/services/admin/Dockerfile b/services/admin/Dockerfile index 0dc2da45..f7a3e536 100644 --- a/services/admin/Dockerfile +++ b/services/admin/Dockerfile @@ -24,2 +23,0 @@ WORKDIR /src -COPY libs/libcache/dist ./libs/libcache/dist -COPY libs/libqueue/dist 
./libs/libqueue/dist
@@ -30 +27,0 @@ COPY services/admin/pyproject.toml ./services/admin/pyproject.toml
-COPY services/admin/Scripts.mk ./services/admin/Makefile
diff --git a/services/admin/README.md b/services/admin/README.md
index 4482dc9c..91132ff7 100644
--- a/services/admin/README.md
+++ b/services/admin/README.md
@@ -3 +3 @@
-> Admin scripts and endpoints
+> Admin endpoints
@@ -7 +7 @@
-The worker con be configured using environment variables. They are grouped by scope.
+The worker can be configured using environment variables. They are grouped by scope.
@@ -30,8 +29,0 @@ The following environment variables are used to configure the Uvicorn server (`A
-### Cache
-
-See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration.
-
-### Queue
-
-See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration.
-
@@ -47,5 +39,2 @@ The admin service provides endpoints:
-- `/metrics`: gives info about the cache and the queue
-- `/cache-reports`: give detailed reports on the content of the cache:
-  - `/cache-reports/features`
-  - `/cache-reports/first-rows`
-  - `/cache-reports/splits`
+- `/metrics`: give info about the cache and the queue
+- `/cache-reports/{processing_step}`: give detailed reports on the content of the cache for a processing step
@@ -53,24 +42,4 @@ The admin service provides endpoints:
-- `/force-refresh`: force refresh cache entries. It's a POST endpoint:
-  - `/force-refresh/splits?dataset={dataset}`
-  - `/force-refresh/first-rows?dataset={dataset}&config={config}&split={split}`
-
-## Scripts
-
-The scripts:
-
-- `cancel-jobs-splits`: cancel all the started jobs for /splits (stop the workers before!)
-- `cancel-jobs-first-rows`: cancel all the started jobs for /first-rows (stop the workers before!)
-
-To launch the scripts:
-
-- if the image runs in a docker container:
-
-  ```shell
-  docker exec -it datasets-server_admin_1 make <SCRIPT>
-  ```
-
-- if the image runs in a kube pod:
-
-  ```shell
-  kubectl exec datasets-server-prod-admin-5cc8f8fcd7-k7jfc -- make <SCRIPT>
-  ```
+- `/force-refresh/{processing_step}`: force refresh cache entries for the processing step. It's a POST endpoint. Pass the requested parameters, depending on the processing step's input type:
+  - `dataset`: `?dataset={dataset}`
+  - `split`: `?dataset={dataset}&config={config}&split={split}`
+- `/cancel-jobs/{processing_step}`: cancel all the started jobs for the processing step (stop the corresponding workers before!). It's a POST endpoint.
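To illustrate the endpoints documented in the README hunk above, here is a minimal sketch that exercises them with `requests`. The admin host/port and the `splits` and `first-rows` step names are assumptions for the example, not guarantees from this diff:

```python
import requests

ADMIN_URL = "http://localhost:8081"  # assumed local deployment

# dataset-level processing step: only `dataset` is required
r = requests.post(f"{ADMIN_URL}/force-refresh/splits", params={"dataset": "glue"})
print(r.status_code, r.json())

# split-level processing step: `dataset`, `config` and `split` are required
r = requests.post(
    f"{ADMIN_URL}/force-refresh/first-rows",
    params={"dataset": "glue", "config": "cola", "split": "train"},
)
print(r.status_code, r.json())

# cancel all started jobs for a step (stop the corresponding workers first!)
r = requests.post(f"{ADMIN_URL}/cancel-jobs/splits")
print(r.status_code, r.json())
```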
diff --git a/services/admin/Scripts.mk b/services/admin/Scripts.mk
deleted file mode 100644
index 3608dd4f..00000000
--- a/services/admin/Scripts.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-.PHONY: cancel-jobs-splits
-cancel-jobs-splits:
-	poetry run python src/admin/scripts/cancel_jobs_splits.py
-
-.PHONY: cancel-jobs-first-rows
-cancel-jobs-first-rows:
-	poetry run python src/admin/scripts/cancel_jobs_first_rows.py
diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock
index 02911565..2bb24282 100644
--- a/services/admin/poetry.lock
+++ b/services/admin/poetry.lock
@@ -251 +251 @@ name = "gitdb"
-version = "4.0.9"
+version = "4.0.10"
@@ -255 +255 @@ optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
@@ -299 +299 @@ name = "huggingface-hub"
-version = "0.11.0"
+version = "0.11.1"
@@ -354,19 +353,0 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"]
-[[package]]
-name = "libcache"
-version = "0.4.3"
-description = "Library for the cache in mongodb"
-category = "main"
-optional = false
-python-versions = "==3.9.6"
-
-[package.dependencies]
-appdirs = ">=1.4.4,<2.0.0"
-environs = ">=9.5.0,<10.0.0"
-mongo-types = "0.15.1"
-mongoengine = ">=0.24.1,<0.25.0"
-pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]}
-
-[package.source]
-type = "file"
-url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl"
-
@@ -375 +356 @@ name = "libcommon"
-version = "0.3.3"
+version = "0.5.1"
@@ -381,0 +363 @@ python-versions = "==3.9.6"
+appdirs = ">=1.4.4,<2.0.0"
@@ -383,16 +365 @@ environs = ">=9.5.0,<10.0.0"
-orjson = ">=3.6.4,<4.0.0"
-
-[package.source]
-type = "file"
-url = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl"
-
-[[package]]
-name = "libqueue"
-version = "0.4.13"
-description = "Library for the jobs queue in mongodb"
-category = "main"
-optional = false
-python-versions = "==3.9.6"
-
-[package.dependencies]
-environs = ">=9.5.0,<10.0.0"
+huggingface-hub = ">=0.11.0,<0.12.0"
@@ -401 +368 @@ mongoengine = ">=0.24.1,<0.25.0"
-packaging = ">=21.3,<22.0"
+orjson = ">=3.6.4,<4.0.0"
@@ -407 +374 @@ type = "file"
-url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl"
+url = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl"
@@ -561 +528 @@ name = "pip-audit"
-version = "2.4.6"
+version = "2.4.7"
@@ -833 +800 @@ name = "setuptools"
-version = "65.6.2"
+version = "65.6.3"
@@ -1024 +991 @@ python-versions = "3.9.6"
-content-hash = "fea2b0d92b950310e73d15ef4a4029a600fd183996ad2c28259a2c846ade8c40"
+content-hash = "d819cdf3b8a0c01bb42c4ffb7b524e25a4009709aaa82602ff83879f57e7ace0"
@@ -1171,2 +1138,2 @@ gitdb = [
-    {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
-    {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
+    {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"},
+    {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"},
@@ -1187,2 +1154,2 @@ huggingface-hub = [
-    {file = "huggingface_hub-0.11.0-py3-none-any.whl", hash = "sha256:1f540c6d57cb1684d3578d7bf2d35041a5145b17e8af932505db7f4fbcc7640d"},
-    {file = "huggingface_hub-0.11.0.tar.gz", hash = "sha256:b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6"},
+    {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash =
"sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, + {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, @@ -1202,3 +1168,0 @@ isort = [ -libcache = [ - {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, -] @@ -1206,4 +1170 @@ libcommon = [ - {file = "libcommon-0.3.3-py3-none-any.whl", hash = "sha256:b56b6e48bb2c92c3dfc619fc25718db930bc356f1e54fb9f8a67ba4597cb499d"}, -] -libqueue = [ - {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, + {file = "libcommon-0.5.1-py3-none-any.whl", hash = "sha256:a8d585735a14eb4a1df02fe9e2444627541c5cf7a8a10d9c79ca2d36cf90aff8"}, @@ -1389,2 +1350,2 @@ pip-audit = [ - {file = "pip_audit-2.4.6-py3-none-any.whl", hash = "sha256:d6d830bdbe3fd3efaf54f4a203451f286e75aecb7e44f9f84f7bfbd38aba26ac"}, - {file = "pip_audit-2.4.6.tar.gz", hash = "sha256:00ebef2a52884627f255b879135e28001de4378b8005318b66cc3a802459ee0a"}, + {file = "pip_audit-2.4.7-py3-none-any.whl", hash = "sha256:a99f825ee431a89b89981c4e9e6eaacff5af3233783f2f5d79fe03306dc378ce"}, + {file = "pip_audit-2.4.7.tar.gz", hash = "sha256:f87b37b6db5317a3f5ecebc202b5d4401958b5e4bd05b39d7b230bdc6f63c34b"}, @@ -1626,2 +1587,2 @@ setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 7e409622..75475512 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10,4 +10 @@ environs = "^9.5.0" -huggingface-hub = "^0.11.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl", develop = false } @@ -23,0 +21 @@ flake8 = "^3.9.2" +huggingface-hub = "^0.11.0" @@ -37 +35 @@ requires = ["poetry-core>=1.0.0"] -#addopts = "-k 'wip'" +# addopts = "-k 'wip'" diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 6e5bee6b..19797deb 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -15,4 +15,2 @@ from admin.routes.cache_reports import create_cache_reports_endpoint -from admin.routes.force_refresh_first_rows import ( - create_force_refresh_first_rows_endpoint, -) -from admin.routes.force_refresh_splits import create_force_refresh_splits_endpoint +from admin.routes.cancel_jobs import create_cancel_jobs_endpoint +from admin.routes.force_refresh import create_force_refresh_endpoint @@ -21 +18,0 @@ from admin.routes.pending_jobs import create_pending_jobs_endpoint -from admin.utils import CacheKind @@ -26 +23,2 @@ def create_app() -> Starlette: - prometheus = Prometheus() + processing_steps = 
list(app_config.processing_graph.graph.steps.values()) + prometheus = Prometheus(processing_steps=processing_steps) @@ -35,10 +33,13 @@ def create_app() -> Starlette: - routes = [ - Route("/healthcheck", endpoint=healthcheck_endpoint), - Route("/metrics", endpoint=prometheus.endpoint), - Route( - "/force-refresh/first-rows", - endpoint=create_force_refresh_first_rows_endpoint( - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - external_auth_url=app_config.admin.external_auth_url, - organization=app_config.admin.hf_organization, + routes = ( + [ + Route("/healthcheck", endpoint=healthcheck_endpoint), + Route("/metrics", endpoint=prometheus.endpoint), + # used in a browser tab to monitor the queue + Route( + "/pending-jobs", + endpoint=create_pending_jobs_endpoint( + processing_steps=processing_steps, + max_age=app_config.admin.max_age, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), @@ -46,54 +47,40 @@ def create_app() -> Starlette: - methods=["POST"], - ), - Route( - "/force-refresh/splits", - endpoint=create_force_refresh_splits_endpoint( - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, - external_auth_url=app_config.admin.external_auth_url, - organization=app_config.admin.hf_organization, - ), - methods=["POST"], - ), - # TODO: re-enable. Possibly using tags - # used by https://observablehq.com/@huggingface/quality-assessment-of-datasets-loading - # Route( - # "/cache-reports/features", - # endpoint=create_cache_reports_endpoint( - # cache_kind="features", - # cache_reports_num_results=app_config.admin.cache_reports_num_results, - # max_age=app_config.admin.max_age, - # external_auth_url=app_config.admin.external_auth_url, - # organization=app_config.admin.hf_organization, - # ), - # ), - Route( - "/cache-reports/first-rows", - endpoint=create_cache_reports_endpoint( - kind=CacheKind.FIRST_ROWS, - cache_reports_num_results=app_config.admin.cache_reports_num_results, - max_age=app_config.admin.max_age, - external_auth_url=app_config.admin.external_auth_url, - organization=app_config.admin.hf_organization, - ), - ), - Route( - "/cache-reports/splits", - endpoint=create_cache_reports_endpoint( - kind=CacheKind.SPLITS, - cache_reports_num_results=app_config.admin.cache_reports_num_results, - max_age=app_config.admin.max_age, - external_auth_url=app_config.admin.external_auth_url, - organization=app_config.admin.hf_organization, - ), - ), - # used in a browser tab to monitor the queue - Route( - "/pending-jobs", - endpoint=create_pending_jobs_endpoint( - max_age=app_config.admin.max_age, - external_auth_url=app_config.admin.external_auth_url, - organization=app_config.admin.hf_organization, - ), - ), - ] + ] + + [ + Route( + f"/force-refresh{processing_step.endpoint}", + endpoint=create_force_refresh_endpoint( + processing_step=processing_step, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), + methods=["POST"], + ) + for processing_step in processing_steps + ] + + [ + Route( + f"/cache-reports{processing_step.endpoint}", + endpoint=create_cache_reports_endpoint( + processing_step=processing_step, + cache_reports_num_results=app_config.admin.cache_reports_num_results, + max_age=app_config.admin.max_age, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), + ) + for 
processing_step in processing_steps + ] + + [ + Route( + f"/cancel-jobs{processing_step.endpoint}", + endpoint=create_cancel_jobs_endpoint( + processing_step=processing_step, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), + ) + for processing_step in processing_steps + ] + ) diff --git a/services/admin/src/admin/config.py b/services/admin/src/admin/config.py index 17b74c7f..92c42555 100644 --- a/services/admin/src/admin/config.py +++ b/services/admin/src/admin/config.py @@ -7,3 +7,6 @@ from environs import Env -from libcache.config import CacheConfig -from libcommon.config import CommonConfig -from libqueue.config import QueueConfig +from libcommon.config import ( + CacheConfig, + CommonConfig, + ProcessingGraphConfig, + QueueConfig, +) @@ -46,0 +50 @@ class AppConfig: + processing_graph: ProcessingGraphConfig @@ -52,0 +57 @@ class AppConfig: + self.processing_graph = ProcessingGraphConfig() diff --git a/services/admin/src/admin/dataset.py b/services/admin/src/admin/dataset.py deleted file mode 100644 index 622c8c82..00000000 --- a/services/admin/src/admin/dataset.py +++ /dev/null @@ -1,50 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import logging -from typing import Optional - -from huggingface_hub.hf_api import HfApi -from huggingface_hub.utils import RepositoryNotFoundError -from libqueue.queue import Queue - -from admin.utils import JobType - -splits_queue = Queue(type=JobType.SPLITS.value) -first_rows_queue = Queue(type=JobType.FIRST_ROWS.value) - - -def is_supported( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, -) -> bool: - """ - Check if the dataset exists on the Hub and is supported by the datasets-server. - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, *optional*): - An authentication token (See https://huggingface.co/settings/token) - Returns: - [`bool`]: True if the dataset is supported by the datasets-server. 
- """ - try: - # note that token is required to access gated dataset info - info = HfApi(endpoint=hf_endpoint).dataset_info(dataset, token=hf_token) - except RepositoryNotFoundError: - return False - return info.private is False - - -def update_splits(dataset: str, force: bool = False) -> None: - logging.debug(f"refresh /splits for {dataset}") - splits_queue.add_job(dataset=dataset, force=force) - - -def update_first_rows(dataset: str, config: str, split: str, force: bool = False) -> None: - logging.debug(f"refresh /first-rows for {dataset}, {config}, {split}") - first_rows_queue.add_job(dataset=dataset, config=config, split=split, force=force) diff --git a/services/admin/src/admin/prometheus.py b/services/admin/src/admin/prometheus.py index 7c3657c2..14fe0185 100644 --- a/services/admin/src/admin/prometheus.py +++ b/services/admin/src/admin/prometheus.py @@ -4,0 +5,2 @@ import os +from dataclasses import dataclass +from typing import List @@ -6,2 +8,3 @@ import os -from libcache.simple_cache import get_responses_count_by_kind_status_and_error_code -from libqueue.queue import Queue +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import Queue +from libcommon.simple_cache import get_responses_count_by_kind_status_and_error_code @@ -21,2 +23,0 @@ from starlette.responses import Response -from admin.utils import JobType - @@ -37,0 +39 @@ RESPONSES_IN_CACHE_TOTAL = Gauge( +@dataclass @@ -39,6 +41 @@ class Prometheus: - first_rows_queue: Queue - split_queue: Queue - - def __init__(self): - self.split_queue = Queue(type=JobType.SPLITS.value) - self.first_rows_queue = Queue(type=JobType.FIRST_ROWS.value) + processing_steps: List[ProcessingStep] @@ -58,4 +55,3 @@ class Prometheus: - for status, total in self.split_queue.get_jobs_count_by_status().items(): - QUEUE_JOBS_TOTAL.labels(queue=JobType.SPLITS.value, status=status).set(total) - for status, total in self.first_rows_queue.get_jobs_count_by_status().items(): - QUEUE_JOBS_TOTAL.labels(queue=JobType.FIRST_ROWS.value, status=status).set(total) + for processing_step in self.processing_steps: + for status, total in Queue(type=processing_step.job_type).get_jobs_count_by_status().items(): + QUEUE_JOBS_TOTAL.labels(queue=processing_step.job_type, status=status).set(total) diff --git a/services/admin/src/admin/routes/cache_reports.py b/services/admin/src/admin/routes/cache_reports.py index c26a8e81..4ed5c721 100644 --- a/services/admin/src/admin/routes/cache_reports.py +++ b/services/admin/src/admin/routes/cache_reports.py @@ -7 +7,2 @@ from typing import Optional -from libcache.simple_cache import InvalidCursor, InvalidLimit, get_cache_reports +from libcommon.processing_graph import ProcessingStep +from libcommon.simple_cache import InvalidCursor, InvalidLimit, get_cache_reports @@ -14 +14,0 @@ from admin.utils import ( - CacheKind, @@ -24 +24 @@ def create_cache_reports_endpoint( - kind: CacheKind, + processing_step: ProcessingStep, @@ -33 +33 @@ def create_cache_reports_endpoint( - logging.info(f"Cache reports for {kind.value}, cursor={cursor}") + logging.info(f"Cache reports for {processing_step.cache_kind}, cursor={cursor}") @@ -38 +38 @@ def create_cache_reports_endpoint( - get_cache_reports(kind=kind.value, cursor=cursor, limit=cache_reports_num_results), + get_cache_reports(kind=processing_step.cache_kind, cursor=cursor, limit=cache_reports_num_results), diff --git a/services/admin/src/admin/routes/force_refresh_splits.py b/services/admin/src/admin/routes/cancel_jobs.py similarity index 54% rename from 
services/admin/src/admin/routes/force_refresh_splits.py rename to services/admin/src/admin/routes/cancel_jobs.py index 6f642b47..445e5db2 100644 --- a/services/admin/src/admin/routes/force_refresh_splits.py +++ b/services/admin/src/admin/routes/cancel_jobs.py @@ -6,0 +7,2 @@ from typing import Optional +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import Queue @@ -11 +12,0 @@ from admin.authentication import auth_check -from admin.dataset import is_supported, update_splits @@ -15 +15,0 @@ from admin.utils import ( - MissingRequiredParameterError, @@ -17,2 +16,0 @@ from admin.utils import ( - UnsupportedDatasetError, - are_valid_parameters, @@ -24,3 +22,2 @@ from admin.utils import ( -def create_force_refresh_splits_endpoint( - hf_endpoint: str, - hf_token: Optional[str] = None, +def create_cancel_jobs_endpoint( + processing_step: ProcessingStep, @@ -30 +27 @@ def create_force_refresh_splits_endpoint( - async def force_refresh_splits_endpoint(request: Request) -> Response: + async def cancel_jobs_endpoint(request: Request) -> Response: @@ -32,2 +29 @@ def create_force_refresh_splits_endpoint( - dataset = request.query_params.get("dataset") - logging.info("/force-refresh/splits, dataset={dataset}") + logging.info(f"/cancel-jobs{processing_step.endpoint}") @@ -35,2 +30,0 @@ def create_force_refresh_splits_endpoint( - if not are_valid_parameters([dataset]): - raise MissingRequiredParameterError("Parameter 'dataset' is required") @@ -39,3 +33 @@ def create_force_refresh_splits_endpoint( - if not is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): - raise UnsupportedDatasetError(f"Dataset '{dataset}' is not supported.") - update_splits(dataset=dataset, force=True) + Queue(type=processing_step.job_type).cancel_started_jobs() @@ -51 +43 @@ def create_force_refresh_splits_endpoint( - return force_refresh_splits_endpoint + return cancel_jobs_endpoint diff --git a/services/admin/src/admin/routes/force_refresh.py b/services/admin/src/admin/routes/force_refresh.py new file mode 100644 index 00000000..134e0391 --- /dev/null +++ b/services/admin/src/admin/routes/force_refresh.py @@ -0,0 +1,62 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import logging +from typing import Optional + +from libcommon.dataset import DatasetError, check_support +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import Queue +from starlette.requests import Request +from starlette.responses import Response + +from admin.authentication import auth_check +from admin.utils import ( + AdminCustomError, + Endpoint, + MissingRequiredParameterError, + UnexpectedError, + are_valid_parameters, + get_json_admin_error_response, + get_json_ok_response, +) + + +def create_force_refresh_endpoint( + processing_step: ProcessingStep, + hf_endpoint: str, + hf_token: Optional[str] = None, + external_auth_url: Optional[str] = None, + organization: Optional[str] = None, +) -> Endpoint: + async def force_refresh_endpoint(request: Request) -> Response: + try: + dataset = request.query_params.get("dataset") + if not are_valid_parameters([dataset]): + raise MissingRequiredParameterError("Parameter 'dataset' is required") + if processing_step.input_type == "dataset": + config = None + split = None + else: + config = request.query_params.get("config") + split = request.query_params.get("split") + if not are_valid_parameters([config, split]): + raise MissingRequiredParameterError("Parameters 'config' and 'split' are required") + logging.info( + f"/force-refresh{processing_step.endpoint}, dataset={dataset}, config={config}, split={split}" + ) + + # if auth_check fails, it will raise an exception that will be caught below + auth_check(external_auth_url=external_auth_url, request=request, organization=organization) + check_support(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) + Queue(type=processing_step.job_type).add_job(dataset=dataset, config=config, split=split, force=True) + return get_json_ok_response( + {"status": "ok"}, + max_age=0, + ) + except (DatasetError, AdminCustomError) as e: + return get_json_admin_error_response(e, max_age=0) + except Exception: + return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=0) + + return force_refresh_endpoint diff --git a/services/admin/src/admin/routes/force_refresh_first_rows.py b/services/admin/src/admin/routes/force_refresh_first_rows.py deleted file mode 100644 index eed967cd..00000000 --- a/services/admin/src/admin/routes/force_refresh_first_rows.py +++ /dev/null @@ -1,53 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
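The new force_refresh.py derives the required query parameters from the step's input_type: dataset-level steps need only dataset, while split-level steps also need config and split. A sketch of that branching, with plain ValueError standing in for MissingRequiredParameterError:

from typing import Mapping, Optional, Tuple


def extract_params(input_type: str, query: Mapping[str, str]) -> Tuple[str, Optional[str], Optional[str]]:
    # dataset is always required; config/split only for non-dataset-level steps
    dataset = query.get("dataset")
    if not dataset:
        raise ValueError("Parameter 'dataset' is required")
    if input_type == "dataset":
        return dataset, None, None
    config, split = query.get("config"), query.get("split")
    if not config or not split:
        raise ValueError("Parameters 'config' and 'split' are required")
    return dataset, config, split


assert extract_params("dataset", {"dataset": "squad"}) == ("squad", None, None)
assert extract_params("split", {"dataset": "squad", "config": "plain_text", "split": "train"}) == (
    "squad",
    "plain_text",
    "train",
)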
- -import logging -from typing import Optional - -from starlette.requests import Request -from starlette.responses import Response - -from admin.authentication import auth_check -from admin.dataset import is_supported, update_first_rows -from admin.utils import ( - AdminCustomError, - Endpoint, - MissingRequiredParameterError, - UnexpectedError, - UnsupportedDatasetError, - are_valid_parameters, - get_json_admin_error_response, - get_json_ok_response, -) - - -def create_force_refresh_first_rows_endpoint( - hf_endpoint: str, - hf_token: Optional[str] = None, - external_auth_url: Optional[str] = None, - organization: Optional[str] = None, -) -> Endpoint: - async def force_refresh_first_rows_endpoint(request: Request) -> Response: - try: - dataset = request.query_params.get("dataset") - config = request.query_params.get("config") - split = request.query_params.get("split") - logging.info(f"/force-refresh/first-rows, dataset={dataset}, config={config}, split={split}") - - if not are_valid_parameters([dataset, config, split]): - raise MissingRequiredParameterError("Parameters 'dataset', 'config' and 'split' are required") - # if auth_check fails, it will raise an exception that will be caught below - auth_check(external_auth_url=external_auth_url, request=request, organization=organization) - if not is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): - raise UnsupportedDatasetError(f"Dataset '{dataset}' is not supported.") - update_first_rows(dataset=dataset, config=config, split=split, force=True) - return get_json_ok_response( - {"status": "ok"}, - max_age=0, - ) - except AdminCustomError as e: - return get_json_admin_error_response(e, max_age=0) - except Exception: - return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=0) - - return force_refresh_first_rows_endpoint diff --git a/services/admin/src/admin/routes/pending_jobs.py b/services/admin/src/admin/routes/pending_jobs.py index 51f1a6f2..dd154e7b 100644 --- a/services/admin/src/admin/routes/pending_jobs.py +++ b/services/admin/src/admin/routes/pending_jobs.py @@ -5 +5 @@ import logging -from typing import Optional +from typing import List, Optional @@ -7 +7,2 @@ from typing import Optional -from libqueue.queue import Queue +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import Queue @@ -15 +15,0 @@ from admin.utils import ( - JobType, @@ -23 +23,4 @@ def create_pending_jobs_endpoint( - max_age: int, external_auth_url: Optional[str] = None, organization: Optional[str] = None + processing_steps: List[ProcessingStep], + max_age: int, + external_auth_url: Optional[str] = None, + organization: Optional[str] = None, @@ -25,3 +27,0 @@ def create_pending_jobs_endpoint( - splits_queue = Queue(type=JobType.SPLITS.value) - first_rows_queue = Queue(type=JobType.FIRST_ROWS.value) - @@ -35,2 +35,2 @@ def create_pending_jobs_endpoint( - JobType.SPLITS.value: splits_queue.get_dump_by_pending_status(), - JobType.FIRST_ROWS.value: first_rows_queue.get_dump_by_pending_status(), + processing_step.endpoint: Queue(type=processing_step.job_type).get_dump_by_pending_status() + for processing_step in processing_steps diff --git a/services/admin/src/admin/scripts/__init__.py b/services/admin/src/admin/scripts/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/services/admin/src/admin/scripts/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
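With one queue per processing step, the /pending-jobs dump above collapses into a dict comprehension over the graph instead of two hard-coded entries. A self-contained sketch, with Step and Queue as stand-in types for the libcommon classes:

from dataclasses import dataclass
from typing import Dict, List


@dataclass
class Step:
    # stand-in for libcommon.processing_graph.ProcessingStep
    endpoint: str
    job_type: str


@dataclass
class Queue:
    # stand-in for libcommon.queue.Queue
    type: str

    def get_dump_by_pending_status(self) -> Dict[str, List[dict]]:
        return {"waiting": [], "started": []}


steps = [Step("/splits", "/splits"), Step("/first-rows", "/first-rows")]
dump = {step.endpoint: Queue(type=step.job_type).get_dump_by_pending_status() for step in steps}
assert set(dump) == {"/splits", "/first-rows"}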
diff --git a/services/admin/src/admin/scripts/cancel_jobs_first_rows.py b/services/admin/src/admin/scripts/cancel_jobs_first_rows.py deleted file mode 100644 index 88866722..00000000 --- a/services/admin/src/admin/scripts/cancel_jobs_first_rows.py +++ /dev/null @@ -1,14 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import logging - -from libqueue.queue import Queue - -from admin.config import AppConfig -from admin.utils import JobType - -if __name__ == "__main__": - app_config = AppConfig() - Queue(type=JobType.FIRST_ROWS.value).cancel_started_jobs() - logging.info("all the started jobs in the first_rows/ queue have been cancelled and re-enqueued") diff --git a/services/admin/src/admin/scripts/cancel_jobs_splits.py b/services/admin/src/admin/scripts/cancel_jobs_splits.py deleted file mode 100644 index ae497b10..00000000 --- a/services/admin/src/admin/scripts/cancel_jobs_splits.py +++ /dev/null @@ -1,14 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import logging - -from libqueue.queue import Queue - -from admin.config import AppConfig -from admin.utils import JobType - -if __name__ == "__main__": - app_config = AppConfig() - Queue(type=JobType.SPLITS.value).cancel_started_jobs() - logging.info("all the started jobs in the splits/ queue have been cancelled and re-enqueued") diff --git a/services/admin/src/admin/utils.py b/services/admin/src/admin/utils.py index 62012b15..801fa0ba 100644 --- a/services/admin/src/admin/utils.py +++ b/services/admin/src/admin/utils.py @@ -4 +3,0 @@ -from enum import Enum @@ -108 +107 @@ def get_json_error_response( -def get_json_admin_error_response(error: AdminCustomError, max_age: int) -> Response: +def get_json_admin_error_response(error: CustomError, max_age: int) -> Response: @@ -123,10 +121,0 @@ Endpoint = Callable[[Request], Coroutine[Any, Any, Response]] - - -class JobType(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" - - -class CacheKind(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" diff --git a/services/admin/tests/conftest.py b/services/admin/tests/conftest.py index 188a62a5..128e5a56 100644 --- a/services/admin/tests/conftest.py +++ b/services/admin/tests/conftest.py @@ -3,0 +4,3 @@ +from typing import List + +from libcommon.processing_graph import ProcessingStep @@ -29,0 +33,5 @@ def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: + + +@fixture(scope="session") +def processing_steps(app_config: AppConfig) -> List[ProcessingStep]: + return list(app_config.processing_graph.graph.steps.values()) diff --git a/services/admin/tests/fixtures/hub.py b/services/admin/tests/fixtures/hub.py index a4ce36ed..aa79005b 100644 --- a/services/admin/tests/fixtures/hub.py +++ b/services/admin/tests/fixtures/hub.py @@ -8 +8 @@ from contextlib import contextmanager, suppress -from typing import Dict, Iterable, Literal, Optional, TypedDict +from typing import Iterable, Literal, Mapping, Optional, TypedDict @@ -36 +36 @@ def update_repo_settings( -) -> Dict[str, bool]: +) -> Mapping[str, bool]: diff --git a/services/admin/tests/test_app.py b/services/admin/tests/test_app.py index 51730175..17cf7075 100644 --- a/services/admin/tests/test_app.py +++ b/services/admin/tests/test_app.py @@ -4 +4 @@ -from typing import Optional +from typing import List, Optional @@ -7,2 +7,3 @@ import pytest -from libcache.simple_cache import _clean_cache_database -from libqueue.queue import _clean_queue_database +from libcommon.processing_graph import ProcessingStep 
+from libcommon.queue import _clean_queue_database +from libcommon.simple_cache import _clean_cache_database @@ -12 +13 @@ from admin.app import create_app -from admin.utils import JobType +from admin.config import AppConfig @@ -21 +22 @@ def client(monkeypatch_session: pytest.MonkeyPatch) -> TestClient: -def clean_mongo_databases() -> None: +def clean_mongo_databases(app_config: AppConfig) -> None: @@ -73 +74 @@ def test_metrics(client: TestClient) -> None: -def test_pending_jobs(client: TestClient) -> None: +def test_pending_jobs(client: TestClient, processing_steps: List[ProcessingStep]) -> None: @@ -77,2 +78,2 @@ def test_pending_jobs(client: TestClient) -> None: - for _, job_type in JobType.__members__.items(): - assert json[job_type.value] == {"waiting": [], "started": []} + for processing_step in processing_steps: + assert json[processing_step.job_type] == {"waiting": [], "started": []} @@ -82 +83 @@ def test_pending_jobs(client: TestClient) -> None: - "path,cursor,http_status,error_code", + "cursor,http_status,error_code", @@ -84,6 +85,3 @@ def test_pending_jobs(client: TestClient) -> None: - ("/splits", None, 200, None), - ("/splits", "", 200, None), - ("/splits", "invalid cursor", 422, "InvalidParameter"), - ("/first-rows", None, 200, None), - ("/first-rows", "", 200, None), - ("/first-rows", "invalid cursor", 422, "InvalidParameter"), + (None, 200, None), + ("", 200, None), + ("invalid cursor", 422, "InvalidParameter"), @@ -93 +91,5 @@ def test_cache_reports( - client: TestClient, path: str, cursor: Optional[str], http_status: int, error_code: Optional[str] + client: TestClient, + processing_steps: List[ProcessingStep], + cursor: Optional[str], + http_status: int, + error_code: Optional[str], @@ -94,0 +97 @@ def test_cache_reports( + path = processing_steps[0].endpoint diff --git a/services/admin/tests/test_authentication.py b/services/admin/tests/test_authentication.py index 00b4c46c..866e6893 100644 --- a/services/admin/tests/test_authentication.py +++ b/services/admin/tests/test_authentication.py @@ -4 +4 @@ -from typing import Dict, Optional, Type +from typing import Mapping, Optional, Type @@ -64 +64 @@ def test_org(org: str, status: int, error: Optional[Type[Exception]]) -> None: -def create_request(headers: Dict[str, str]) -> Request: +def create_request(headers: Mapping[str, str]) -> Request: diff --git a/services/admin/tests/test_prometheus.py b/services/admin/tests/test_prometheus.py index 5c45580a..bde3a2e6 100644 --- a/services/admin/tests/test_prometheus.py +++ b/services/admin/tests/test_prometheus.py @@ -1,0 +2,3 @@ import os +from typing import List + +from libcommon.processing_graph import ProcessingStep @@ -5 +7,0 @@ from admin.prometheus import Prometheus -from admin.utils import JobType @@ -8 +10 @@ from admin.utils import JobType -def test_prometheus(app_config: AppConfig) -> None: +def test_prometheus(app_config: AppConfig, processing_steps: List[ProcessingStep]) -> None: @@ -12 +14 @@ def test_prometheus(app_config: AppConfig) -> None: - prometheus = Prometheus() + prometheus = Prometheus(processing_steps=processing_steps) @@ -29,2 +31,5 @@ def test_prometheus(app_config: AppConfig) -> None: - for _, job_type in JobType.__members__.items(): - assert "queue_jobs_total{" + additional_field + 'queue="' + job_type.value + '",status="started"}' in metrics + for processing_step in processing_steps: + assert ( + "queue_jobs_total{" + additional_field + 'queue="' + processing_step.job_type + '",status="started"}' + in metrics + ) diff --git 
a/services/api/Dockerfile b/services/api/Dockerfile index 9e0f6650..a514af67 100644 --- a/services/api/Dockerfile +++ b/services/api/Dockerfile @@ -24,2 +23,0 @@ WORKDIR /src -COPY libs/libcache/dist ./libs/libcache/dist -COPY libs/libqueue/dist ./libs/libqueue/dist diff --git a/services/api/README.md b/services/api/README.md index 978520a1..d492f2cd 100644 --- a/services/api/README.md +++ b/services/api/README.md @@ -29,8 +28,0 @@ The following environment variables are used to configure the Uvicorn server (`A -### Cache - -See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. - -### Queue - -See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. - diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 39db46d4..d9d994c7 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -251 +251 @@ name = "gitdb" -version = "4.0.9" +version = "4.0.10" @@ -255 +255 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -370,19 +369,0 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- -[[package]] -name = "libcache" -version = "0.4.3" -description = "Library for the cache in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} - -[package.source] -type = "file" -url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" - @@ -391 +372 @@ name = "libcommon" -version = "0.3.3" +version = "0.5.1" @@ -397,0 +379 @@ python-versions = "==3.9.6" +appdirs = ">=1.4.4,<2.0.0" @@ -399,16 +381 @@ environs = ">=9.5.0,<10.0.0" -orjson = ">=3.6.4,<4.0.0" - -[package.source] -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl" - -[[package]] -name = "libqueue" -version = "0.4.13" -description = "Library for the jobs queue in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -environs = ">=9.5.0,<10.0.0" +huggingface-hub = ">=0.11.0,<0.12.0" @@ -417 +384 @@ mongoengine = ">=0.24.1,<0.25.0" -packaging = ">=21.3,<22.0" +orjson = ">=3.6.4,<4.0.0" @@ -423 +390 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl" @@ -861 +828 @@ name = "setuptools" -version = "65.6.2" +version = "65.6.3" @@ -1066 +1033 @@ python-versions = "3.9.6" -content-hash = "2a3e653f65a138001dd2d8d68462536ec5f3f40476e9363204e72a041e20d8e4" +content-hash = "169c56202dc047e7954fb71768fc1a06ad1b2e4cdbe14bb2e3e73c271ca9b83d" @@ -1213,2 +1180,2 @@ gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, @@ -1248,3 +1214,0 @@ jsonschema = [ -libcache = [ - {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, -] @@ -1252,4 +1216 @@ libcommon = [ 
- {file = "libcommon-0.3.3-py3-none-any.whl", hash = "sha256:b56b6e48bb2c92c3dfc619fc25718db930bc356f1e54fb9f8a67ba4597cb499d"}, -] -libqueue = [ - {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, + {file = "libcommon-0.5.1-py3-none-any.whl", hash = "sha256:a8d585735a14eb4a1df02fe9e2444627541c5cf7a8a10d9c79ca2d36cf90aff8"}, @@ -1738,2 +1699,2 @@ setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index cbd312ac..0eea603f 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12,3 +12 @@ jsonschema = "^4.16.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.1-py3-none-any.whl", develop = false } @@ -26,0 +25 @@ mypy = "0.812" +pip-audit = "^2.4.6" @@ -31 +29,0 @@ pytest-httpserver = "^1.0.6" -pip-audit = "^2.4.6" @@ -38 +36 @@ requires = ["poetry-core>=1.0.0"] -addopts = "-k 'not deprecated'" +# addopts = "-k 'wip'" @@ -41 +38,0 @@ markers = [ - "deprecated: tests on deprecated code (deselect with '-m \"not deprecated\"')", diff --git a/services/api/src/api/app.py b/services/api/src/api/app.py index 62590437..49621535 100644 --- a/services/api/src/api/app.py +++ b/services/api/src/api/app.py @@ -17 +16,0 @@ from api.prometheus import Prometheus -from api.routes.first_rows import create_first_rows_endpoint @@ -19,2 +18,2 @@ from api.routes.healthcheck import healthcheck_endpoint -from api.routes.splits import create_splits_endpoint -from api.routes.valid import create_is_valid_endpoint, valid_endpoint +from api.routes.processing_step import create_processing_step_endpoint +from api.routes.valid import create_is_valid_endpoint, create_valid_endpoint @@ -35,2 +34,9 @@ def create_app() -> Starlette: - documented: List[BaseRoute] = [ - Route("/valid", endpoint=valid_endpoint), + valid: List[BaseRoute] = [ + Route( + "/valid", + endpoint=create_valid_endpoint( + processing_steps_for_valid=app_config.processing_graph.graph.get_steps_required_by_dataset_viewer(), + max_age_long=app_config.api.max_age_long, + max_age_short=app_config.api.max_age_short, + ), + ), @@ -40,0 +47,3 @@ def create_app() -> Starlette: + processing_steps_for_valid=app_config.processing_graph.graph.get_steps_required_by_dataset_viewer(), + max_age_long=app_config.api.max_age_long, + max_age_short=app_config.api.max_age_short, @@ -42 +51 @@ def create_app() -> Starlette: - ), + ) @@ -43,0 +53,2 @@ def create_app() -> Starlette: + ] + processing_steps: List[BaseRoute] = [ @@ -45,3 +56,4 @@ def create_app() -> Starlette: - "/first-rows", - endpoint=create_first_rows_endpoint( - external_auth_url=app_config.api.external_auth_url, + processing_step.endpoint, + 
endpoint=create_processing_step_endpoint( + processing_step=processing_step, + init_processing_steps=app_config.processing_graph.graph.get_first_steps(), @@ -50,5 +61,0 @@ def create_app() -> Starlette: - ), - ), - Route( - "/splits", - endpoint=create_splits_endpoint( @@ -56,2 +63,2 @@ def create_app() -> Starlette: - hf_endpoint=app_config.common.hf_endpoint, - hf_token=app_config.common.hf_token, + max_age_long=app_config.api.max_age_long, + max_age_short=app_config.api.max_age_short, @@ -59 +66,2 @@ def create_app() -> Starlette: - ), + ) + for processing_step in list(app_config.processing_graph.graph.steps.values()) @@ -66 +74,3 @@ def create_app() -> Starlette: - hf_endpoint=app_config.common.hf_endpoint, hf_token=app_config.common.hf_token + init_processing_steps=app_config.processing_graph.graph.get_first_steps(), + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, @@ -84 +94 @@ def create_app() -> Starlette: - routes: List[BaseRoute] = documented + to_protect + protected + for_development_only + routes: List[BaseRoute] = valid + processing_steps + to_protect + protected + for_development_only diff --git a/services/api/src/api/config.py b/services/api/src/api/config.py index 24a5519d..928458bc 100644 --- a/services/api/src/api/config.py +++ b/services/api/src/api/config.py @@ -7,3 +7,6 @@ from environs import Env -from libcache.config import CacheConfig -from libcommon.config import CommonConfig -from libqueue.config import QueueConfig +from libcommon.config import ( + CacheConfig, + CommonConfig, + ProcessingGraphConfig, + QueueConfig, +) @@ -44,0 +48 @@ class AppConfig: + processing_graph: ProcessingGraphConfig @@ -49,0 +54 @@ class AppConfig: + self.processing_graph = ProcessingGraphConfig() diff --git a/services/api/src/api/dataset.py b/services/api/src/api/dataset.py deleted file mode 100644 index 04f8d500..00000000 --- a/services/api/src/api/dataset.py +++ /dev/null @@ -1,112 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import logging -from http import HTTPStatus -from typing import Optional - -from huggingface_hub.hf_api import HfApi -from huggingface_hub.utils import RepositoryNotFoundError -from libcache.simple_cache import DoesNotExist, delete_dataset_responses, get_response -from libqueue.queue import Queue - -from api.utils import CacheKind, JobType - -splits_queue = Queue(type=JobType.SPLITS.value) -first_rows_queue = Queue(type=JobType.FIRST_ROWS.value) - - -def is_supported( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, -) -> bool: - """ - Check if the dataset exists on the Hub and is supported by the datasets-server. - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, *optional*): - An authentication token (See https://huggingface.co/settings/token) - Returns: - [`bool`]: True if the dataset is supported by the datasets-server. 
- """ - try: - # note that token is required to access gated dataset info - info = HfApi(endpoint=hf_endpoint).dataset_info(dataset, token=hf_token) - except RepositoryNotFoundError: - return False - return info.private is False - - -def update(dataset: str, hf_endpoint: str, hf_token: Optional[str] = None, force: bool = False) -> bool: - if is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): - logging.debug(f"refresh dataset='{dataset}'") - splits_queue.add_job(dataset=dataset, force=force) - return True - else: - logging.debug(f"can't refresh dataset='{dataset}', it's not supported (does not exist, private, etc.)") - return False - - -def delete(dataset: str) -> bool: - logging.debug(f"delete cache for dataset='{dataset}'") - delete_dataset_responses(dataset=dataset) - return True - - -def move( - from_dataset: str, to_dataset: str, hf_endpoint: str, hf_token: Optional[str] = None, force: bool = False -) -> bool: - # not optimal as we might try to rename instead - if update(dataset=to_dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, force=force): - return delete(dataset=from_dataset) - else: - return False - - -def is_splits_in_process( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, -) -> bool: - if splits_queue.is_job_in_process(dataset=dataset): - # the /splits response is not ready yet - return True - return update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, force=False) - - -def is_first_rows_in_process( - dataset: str, config: str, split: str, hf_endpoint: str, hf_token: Optional[str] = None -) -> bool: - if first_rows_queue.is_job_in_process( - dataset=dataset, config=config, split=split - ) or splits_queue.is_job_in_process(dataset=dataset): - return True - - # a bit convoluted, but to check if the first-rows response should exist, - # we have to check the content of the /splits response for the same dataset - try: - result = get_response(kind=CacheKind.SPLITS.value, dataset=dataset) - except DoesNotExist: - # the splits responses does not exist, update - return update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) - - if result["http_status"] == HTTPStatus.OK and any( - split_item["dataset"] == dataset or split_item["config"] == config or split_item["split"] == split - for split_item in result["content"]["splits"] - ): - # The split is listed in the /splits response. - # Let's refresh *the whole dataset*, because something did not work - # Note that we "force" the refresh - # - # Caveat: we don't check if the /first-rows response already exists in the cache, - # because we assume it's the reason why one would call this function - return update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, force=True) - else: - # the /splits response is an error, or the split is not listed in the /splits response, so it's normal - # that it's not in the cache - return False diff --git a/services/api/src/api/routes/first_rows.py b/services/api/src/api/routes/first_rows.py deleted file mode 100644 index 48a04938..00000000 --- a/services/api/src/api/routes/first_rows.py +++ /dev/null @@ -1,72 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
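The deleted api/dataset.py above implemented "move" as an update under the new name followed by a deletion of the old cache entries; its own comment concedes this is "not optimal as we might try to rename instead". A sketch of that two-step fallback, with the update/delete helpers passed in as plain callables:

from typing import Callable


def move(from_dataset: str, to_dataset: str, update: Callable[[str], bool], delete: Callable[[str], bool]) -> bool:
    # not a true rename: refresh the cache under the new name, and only then
    # drop the entries stored under the old one
    if update(to_dataset):
        return delete(from_dataset)
    return False


assert move("old/name", "new/name", update=lambda d: True, delete=lambda d: True) is True
assert move("old/name", "unsupported", update=lambda d: False, delete=lambda d: True) is False

If the destination is unsupported (private, deleted, etc.), the old cache entries are kept untouched.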
- -import logging -from http import HTTPStatus -from typing import Optional - -from libcache.simple_cache import DoesNotExist, get_response -from starlette.requests import Request -from starlette.responses import Response - -from api.authentication import auth_check -from api.dataset import is_first_rows_in_process -from api.utils import ( - ApiCustomError, - CacheKind, - Endpoint, - FirstRowsResponseNotFoundError, - FirstRowsResponseNotReadyError, - MissingRequiredParameterError, - UnexpectedError, - are_valid_parameters, - get_json_api_error_response, - get_json_error_response, - get_json_ok_response, -) - - -def create_first_rows_endpoint( - hf_endpoint: str, - hf_token: Optional[str] = None, - external_auth_url: Optional[str] = None, - max_age_long: int = 0, - max_age_short: int = 0, -) -> Endpoint: - async def first_rows_endpoint(request: Request) -> Response: - try: - dataset = request.query_params.get("dataset") - config = request.query_params.get("config") - split = request.query_params.get("split") - logging.info(f"/first-rows, dataset={dataset}, config={config}, split={split}") - - if not are_valid_parameters([dataset, config, split]): - raise MissingRequiredParameterError("Parameters 'dataset', 'config' and 'split' are required") - # if auth_check fails, it will raise an exception that will be caught below - auth_check(dataset, external_auth_url=external_auth_url, request=request) - try: - result = get_response(kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split) - content = result["content"] - http_status = result["http_status"] - error_code = result["error_code"] - if http_status == HTTPStatus.OK: - return get_json_ok_response(content=content, max_age=max_age_long) - else: - return get_json_error_response( - content=content, status_code=http_status, max_age=max_age_short, error_code=error_code - ) - except DoesNotExist as e: - # maybe the first-rows response is in process - if is_first_rows_in_process( - dataset=dataset, config=config, split=split, hf_endpoint=hf_endpoint, hf_token=hf_token - ): - raise FirstRowsResponseNotReadyError( - "The list of the first rows is not ready yet. Please retry later." - ) from e - raise FirstRowsResponseNotFoundError("Not found.") from e - except ApiCustomError as e: - return get_json_api_error_response(error=e, max_age=max_age_short) - except Exception as e: - return get_json_api_error_response(error=UnexpectedError("Unexpected error.", e), max_age=max_age_short) - - return first_rows_endpoint diff --git a/services/api/src/api/routes/processing_step.py b/services/api/src/api/routes/processing_step.py new file mode 100644 index 00000000..20f4ef2e --- /dev/null +++ b/services/api/src/api/routes/processing_step.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import logging +from http import HTTPStatus +from typing import List, Optional + +from libcommon.dataset import DatasetError +from libcommon.operations import PreviousStepError, check_in_process +from libcommon.processing_graph import ProcessingStep +from libcommon.simple_cache import DoesNotExist, get_response +from starlette.requests import Request +from starlette.responses import Response + +from api.authentication import auth_check +from api.utils import ( + ApiCustomError, + Endpoint, + MissingRequiredParameterError, + ResponseNotFoundError, + ResponseNotReadyError, + UnexpectedError, + are_valid_parameters, + get_json_api_error_response, + get_json_error_response, + get_json_ok_response, +) + + +def create_processing_step_endpoint( + processing_step: ProcessingStep, + init_processing_steps: List[ProcessingStep], + hf_endpoint: str, + hf_token: Optional[str] = None, + external_auth_url: Optional[str] = None, + max_age_long: int = 0, + max_age_short: int = 0, +) -> Endpoint: + async def processing_step_endpoint(request: Request) -> Response: + try: + dataset = request.query_params.get("dataset") + if not are_valid_parameters([dataset]): + raise MissingRequiredParameterError("Parameter 'dataset' is required") + if processing_step.input_type == "dataset": + config = None + split = None + else: + config = request.query_params.get("config") + split = request.query_params.get("split") + if not are_valid_parameters([config, split]): + raise MissingRequiredParameterError("Parameters 'config' and 'split' are required") + logging.info(f"{processing_step.endpoint}, dataset={dataset}, config={config}, split={split}") + + # if auth_check fails, it will raise an exception that will be caught below + auth_check(dataset, external_auth_url=external_auth_url, request=request) + try: + result = get_response(kind=processing_step.cache_kind, dataset=dataset, config=config, split=split) + content = result["content"] + http_status = result["http_status"] + error_code = result["error_code"] + if http_status == HTTPStatus.OK: + return get_json_ok_response(content=content, max_age=max_age_long) + else: + return get_json_error_response( + content=content, status_code=http_status, max_age=max_age_short, error_code=error_code + ) + except DoesNotExist as e: + # maybe the response is in process + try: + check_in_process( + processing_step=processing_step, + init_processing_steps=init_processing_steps, + dataset=dataset, + config=config, + split=split, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + ) + except (PreviousStepError, DatasetError): + raise ResponseNotFoundError("Not found.") from e + raise ResponseNotReadyError("The response is not ready yet. Please retry later.") from e + except ApiCustomError as e: + return get_json_api_error_response(error=e, max_age=max_age_short) + except Exception as e: + return get_json_api_error_response(error=UnexpectedError("Unexpected error.", e), max_age=max_age_short) + + return processing_step_endpoint diff --git a/services/api/src/api/routes/splits.py b/services/api/src/api/routes/splits.py deleted file mode 100644 index d3c7e835..00000000 --- a/services/api/src/api/routes/splits.py +++ /dev/null @@ -1,68 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
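processing_step.py above folds the deleted /splits and /first-rows routes into a single cache-then-queue lookup. A runnable sketch of that control flow; DoesNotExist, ResponseNotReady and ResponseNotFound are local stand-ins for the libcommon and api.utils exceptions, and the two callables replace get_response and check_in_process:

from typing import Any, Callable, Mapping, Optional


class DoesNotExist(Exception):
    pass


class ResponseNotReady(Exception):
    pass


class ResponseNotFound(Exception):
    pass


def resolve_response(
    cache_kind: str,
    dataset: str,
    config: Optional[str],
    split: Optional[str],
    get_response: Callable[..., Mapping[str, Any]],
    check_in_process: Callable[..., None],
) -> Mapping[str, Any]:
    try:
        # the cached result, whether it is an OK payload or a stored error
        return get_response(kind=cache_kind, dataset=dataset, config=config, split=split)
    except DoesNotExist as e:
        try:
            # raises if the dataset is unsupported or a previous step failed
            check_in_process(dataset=dataset, config=config, split=split)
        except Exception:
            raise ResponseNotFound("Not found.") from e
        raise ResponseNotReady("The response is not ready yet. Please retry later.") from e


def _missing(**_: Any) -> Mapping[str, Any]:
    raise DoesNotExist()


try:
    resolve_response("/splits", "squad", None, None, _missing, lambda **_: None)
except ResponseNotReady:
    pass  # a job is (or has just been put) in process, so the client should retry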
- -import logging -from http import HTTPStatus -from typing import Optional - -from libcache.simple_cache import DoesNotExist, get_response -from starlette.requests import Request -from starlette.responses import Response - -from api.authentication import auth_check -from api.dataset import is_splits_in_process -from api.utils import ( - ApiCustomError, - CacheKind, - Endpoint, - MissingRequiredParameterError, - SplitsResponseNotFoundError, - SplitsResponseNotReadyError, - UnexpectedError, - are_valid_parameters, - get_json_api_error_response, - get_json_error_response, - get_json_ok_response, -) - - -def create_splits_endpoint( - hf_endpoint: str, - hf_token: Optional[str] = None, - external_auth_url: Optional[str] = None, - max_age_long: int = 0, - max_age_short: int = 0, -) -> Endpoint: - async def splits_endpoint(request: Request) -> Response: - try: - dataset = request.query_params.get("dataset") - logging.info(f"/splits, dataset={dataset}") - - if not are_valid_parameters([dataset]): - raise MissingRequiredParameterError("Parameter 'dataset' is required") - # if auth_check fails, it will raise an exception that will be caught below - auth_check(dataset, external_auth_url=external_auth_url, request=request) - try: - result = get_response(kind=CacheKind.SPLITS.value, dataset=dataset) - content = result["content"] - http_status = result["http_status"] - error_code = result["error_code"] - if http_status == HTTPStatus.OK: - return get_json_ok_response(content=content, max_age=max_age_long) - else: - return get_json_error_response( - content=content, status_code=http_status, max_age=max_age_short, error_code=error_code - ) - except DoesNotExist as e: - # maybe the splits response is in process - if is_splits_in_process(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): - raise SplitsResponseNotReadyError( - "The list of splits is not ready yet. Please retry later." - ) from e - raise SplitsResponseNotFoundError("Not found.") from e - except ApiCustomError as e: - return get_json_api_error_response(error=e, max_age=max_age_short) - except Exception as err: - return get_json_api_error_response(error=UnexpectedError("Unexpected error.", err), max_age=max_age_short) - - return splits_endpoint diff --git a/services/api/src/api/routes/valid.py b/services/api/src/api/routes/valid.py index 856512ca..1ca2f95f 100644 --- a/services/api/src/api/routes/valid.py +++ b/services/api/src/api/routes/valid.py @@ -5 +5 @@ import logging -from typing import List, Optional +from typing import List, Optional, Set @@ -7 +7,2 @@ from typing import List, Optional -from libcache.simple_cache import get_valid_datasets, get_validity_by_kind +from libcommon.processing_graph import ProcessingStep +from libcommon.simple_cache import get_valid_datasets, get_validity_by_kind @@ -14 +14,0 @@ from api.utils import ( - CacheKind, @@ -24,6 +24,10 @@ from api.utils import ( -def get_valid() -> List[str]: - # a dataset is considered valid if: - # - the /splits response is valid - datasets = get_valid_datasets(kind=CacheKind.SPLITS.value) - # - at least one of the /first-rows responses is valid - datasets.intersection_update(get_valid_datasets(kind=CacheKind.FIRST_ROWS.value)) +def get_valid(processing_steps_for_valid: List[ProcessingStep]) -> List[str]: + # a dataset is considered valid if, for each processing step in + # processing_steps_for_valid, at least one cached response is valid.
+ datasets: Optional[Set[str]] = None + for processing_step in processing_steps_for_valid: + kind_datasets = get_valid_datasets(kind=processing_step.cache_kind) + if datasets is None: + datasets = kind_datasets + else: + datasets.intersection_update(kind_datasets) @@ -31 +35 @@ - return sorted(datasets) + return [] if datasets is None else sorted(datasets) @@ -34,4 +38,3 @@ -def is_valid(dataset: str) -> bool: - # a dataset is considered valid if: - # - the /splits response is valid - # - at least one of the /first-rows responses is valid +def is_valid(dataset: str, processing_steps_for_valid: List[ProcessingStep]) -> bool: + # a dataset is considered valid if, for each processing step in + # processing_steps_for_valid, at least one cached response is valid @@ -39,5 +42,3 @@ - return ( - CacheKind.SPLITS.value in validity_by_kind - and validity_by_kind[CacheKind.SPLITS.value] - and CacheKind.FIRST_ROWS.value in validity_by_kind - and validity_by_kind[CacheKind.FIRST_ROWS.value] + return all( + processing_step.cache_kind in validity_by_kind and validity_by_kind[processing_step.cache_kind] + for processing_step in processing_steps_for_valid @@ -47,7 +48,15 @@ -async def valid_endpoint(_: Request) -> Response: - try: - logging.info("/valid") - content = {"valid": get_valid()} - return get_json_ok_response(content) - except Exception: - return get_json_api_error_response(UnexpectedError("Unexpected error.")) +def create_valid_endpoint( + processing_steps_for_valid: List[ProcessingStep], + max_age_long: int = 0, + max_age_short: int = 0, +) -> Endpoint: + # this endpoint is used by the frontend to know which datasets support the dataset viewer + async def valid_endpoint(_: Request) -> Response: + try: + logging.info("/valid") + content = {"valid": get_valid(processing_steps_for_valid=processing_steps_for_valid)} + return get_json_ok_response(content, max_age=max_age_long) + except Exception: + return get_json_api_error_response(UnexpectedError("Unexpected error."), max_age=max_age_short) + + return valid_endpoint @@ -57 +66,4 @@ def create_is_valid_endpoint( - external_auth_url: Optional[str] = None, max_age_long: int = 0, max_age_short: int = 0 + processing_steps_for_valid: List[ProcessingStep], + external_auth_url: Optional[str] = None, + max_age_long: int = 0, + max_age_short: int = 0, @@ -58,0 +71 @@ def create_is_valid_endpoint( + # this endpoint is used to know if a dataset supports the dataset viewer @@ -68 +81 @@ def create_is_valid_endpoint( - "valid": is_valid(dataset), + "valid": is_valid(dataset=dataset, processing_steps_for_valid=processing_steps_for_valid), diff --git a/services/api/src/api/routes/webhook.py b/services/api/src/api/routes/webhook.py index 829119e3..0f25148c 100644 --- a/services/api/src/api/routes/webhook.py +++ b/services/api/src/api/routes/webhook.py @@ -7,0 +8,3 @@ from jsonschema import ValidationError, validate # type: ignore +from libcommon.dataset import DatasetError +from libcommon.operations import delete_dataset, move_dataset, update_dataset +from libcommon.processing_graph import ProcessingStep @@ -11 +13,0 @@ from starlette.responses import Response -from api.dataset import delete, move, update @@ -57 +59,6 @@ def parse_payload(json: Any) -> MoonWebhookV2Payload: -def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: Optional[str] = None) -> None: +def process_payload( + init_processing_steps: List[ProcessingStep], + payload: 
MoonWebhookV2Payload, + hf_endpoint: str, + hf_token: Optional[str] = None, +) -> None: @@ -65 +72,7 @@ def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: O - update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, force=False) + update_dataset( + dataset=dataset, + init_processing_steps=init_processing_steps, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + force=False, + ) @@ -67 +80 @@ def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: O - delete(dataset=dataset) + delete_dataset(dataset=dataset) @@ -72,4 +85,13 @@ def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: O - move(from_dataset=dataset, to_dataset=moved_to, hf_endpoint=hf_endpoint, hf_token=hf_token, force=False) - - -def create_webhook_endpoint(hf_endpoint: str, hf_token: Optional[str] = None) -> Endpoint: + move_dataset( + from_dataset=dataset, + to_dataset=moved_to, + init_processing_steps=init_processing_steps, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + force=False, + ) + + +def create_webhook_endpoint( + init_processing_steps: List[ProcessingStep], hf_endpoint: str, hf_token: Optional[str] = None +) -> Endpoint: @@ -92 +114,10 @@ def create_webhook_endpoint(hf_endpoint: str, hf_token: Optional[str] = None) -> - process_payload(payload, hf_endpoint, hf_token) + try: + process_payload( + init_processing_steps=init_processing_steps, + payload=payload, + hf_endpoint=hf_endpoint, + hf_token=hf_token, + ) + except DatasetError: + content = {"status": "error", "error": "the dataset is not supported"} + return get_response(content, 400) diff --git a/services/api/src/api/utils.py b/services/api/src/api/utils.py index f0014bd2..293d7278 100644 --- a/services/api/src/api/utils.py +++ b/services/api/src/api/utils.py @@ -4 +3,0 @@ -from enum import Enum @@ -15,4 +14,2 @@ ApiErrorCode = Literal[ - "SplitsResponseNotReady", - "FirstRowsResponseNotReady", - "SplitsResponseNotFound", - "FirstRowsResponseNotFound", + "ResponseNotReady", + "ResponseNotFound", @@ -46,2 +43,2 @@ class MissingRequiredParameterError(ApiCustomError): -class SplitsResponseNotReadyError(ApiCustomError): - """Raised when the /splits response has not been processed yet.""" +class ResponseNotReadyError(ApiCustomError): + """Raised when the response has not been processed yet.""" @@ -50 +47 @@ class SplitsResponseNotReadyError(ApiCustomError): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "SplitsResponseNotReady") + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "ResponseNotReady") @@ -53,2 +50,2 @@ class SplitsResponseNotReadyError(ApiCustomError): -class FirstRowsResponseNotReadyError(ApiCustomError): - """Raised when the /first-rows response has not been processed yet.""" +class ResponseNotFoundError(ApiCustomError): + """Raised when the response has not been found.""" @@ -57,15 +54 @@ class FirstRowsResponseNotReadyError(ApiCustomError): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "FirstRowsResponseNotReady") - - -class FirstRowsResponseNotFoundError(ApiCustomError): - """Raised when the response for /first-rows has not been found.""" - - def __init__(self, message: str): - super().__init__(message, HTTPStatus.NOT_FOUND, "FirstRowsResponseNotFound") - - -class SplitsResponseNotFoundError(ApiCustomError): - """Raised when the response for /splits has not been found.""" - - def __init__(self, message: str): - super().__init__(message, HTTPStatus.NOT_FOUND, "SplitsResponseNotFound") + super().__init__(message, 
HTTPStatus.NOT_FOUND, "ResponseNotFound") @@ -142,10 +124,0 @@ Endpoint = Callable[[Request], Coroutine[Any, Any, Response]] - - -class JobType(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" - - -class CacheKind(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" diff --git a/services/api/tests/conftest.py b/services/api/tests/conftest.py index 4ad69dcb..db3b83b1 100644 --- a/services/api/tests/conftest.py +++ b/services/api/tests/conftest.py @@ -49,0 +50,10 @@ def hf_auth_path(app_config: AppConfig): + + +@fixture(scope="session") +def first_dataset_processing_step(app_config: AppConfig): + return next(step for step in app_config.processing_graph.graph.steps.values() if step.input_type == "dataset") + + +@fixture(scope="session") +def first_split_processing_step(app_config: AppConfig): + return next(step for step in app_config.processing_graph.graph.steps.values() if step.input_type == "split") diff --git a/services/api/tests/routes/test_valid.py b/services/api/tests/routes/test_valid.py index 1d846bf9..340ef913 100644 --- a/services/api/tests/routes/test_valid.py +++ b/services/api/tests/routes/test_valid.py @@ -1,0 +2 @@ from http import HTTPStatus +from typing import List @@ -4 +5,2 @@ import pytest -from libcache.simple_cache import _clean_cache_database, upsert_response +from libcommon.processing_graph import ProcessingStep +from libcommon.simple_cache import _clean_cache_database, upsert_response @@ -5,0 +8 @@ from libcache.simple_cache import _clean_cache_database, upsert_response +from api.config import AppConfig @@ -7 +10,19 @@ from api.routes.valid import get_valid, is_valid -from api.utils import CacheKind + +dataset_step = ProcessingStep( + endpoint="/dataset-step", + input_type="dataset", + requires=None, + required_by_dataset_viewer=False, + parent=None, + ancestors=[], + children=[], +) +split_step = ProcessingStep( + endpoint="/split-step", + input_type="split", + requires=None, + required_by_dataset_viewer=False, + parent=None, + ancestors=[], + children=[], +) @@ -11 +32 @@ from api.utils import CacheKind -def clean_mongo_databases() -> None: +def clean_mongo_databases(app_config: AppConfig) -> None: @@ -15,42 +36,25 @@ def clean_mongo_databases() -> None: -def test_empty() -> None: - assert get_valid() == [] - assert is_valid("dataset") is False - - -def test_only_splits() -> None: - dataset = "dataset" - upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) - assert get_valid() == [] - assert is_valid("dataset") is False - - -def test_only_first_rows() -> None: - dataset = "dataset" - upsert_response( - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset, - config="config", - split="split", - content={}, - http_status=HTTPStatus.OK, - ) - assert get_valid() == [] - assert is_valid("dataset") is False - - -def test_splits_and_first_rows_ok() -> None: - dataset = "dataset" - upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) - upsert_response( - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset, - config="config", - split="split", - content={}, - http_status=HTTPStatus.OK, - ) - assert get_valid() == [dataset] - assert is_valid("dataset") is True - - -def test_splits_and_first_rows_ok_and_error() -> None: [email protected]( + "processing_steps_for_valid,expected_is_valid", + [ + ([], True), + ([dataset_step], False), + ([dataset_step, split_step], False), + ], +) +def test_empty(processing_steps_for_valid: List[ProcessingStep], expected_is_valid: bool) -> None: 
+ assert get_valid(processing_steps_for_valid=processing_steps_for_valid) == [] + assert is_valid(dataset="dataset", processing_steps_for_valid=processing_steps_for_valid) is expected_is_valid + + [email protected]( + "processing_steps_for_valid,expected_is_valid,expected_valid", + [ + ([], True, []), + ([dataset_step], True, ["dataset"]), + ([split_step], False, []), + ([dataset_step, split_step], False, []), + ], +) +def test_one_step( + processing_steps_for_valid: List[ProcessingStep], expected_is_valid: bool, expected_valid: List[str] +) -> None: @@ -58,22 +62,17 @@ def test_splits_and_first_rows_ok_and_error() -> None: - upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) - upsert_response( - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset, - config="config", - split="split_a", - content={}, - http_status=HTTPStatus.OK, - ) - upsert_response( - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset, - config="config", - split="split_b", - content={}, - http_status=HTTPStatus.INTERNAL_SERVER_ERROR, - ) - assert get_valid() == [dataset] - assert is_valid("dataset") is True - - -def test_splits_and_first_rows_only_errors() -> None: + upsert_response(kind=dataset_step.cache_kind, dataset=dataset, content={}, http_status=HTTPStatus.OK) + assert get_valid(processing_steps_for_valid=processing_steps_for_valid) == expected_valid + assert is_valid(dataset=dataset, processing_steps_for_valid=processing_steps_for_valid) is expected_is_valid + + [email protected]( + "processing_steps_for_valid,expected_is_valid,expected_valid", + [ + ([], True, []), + ([dataset_step], True, ["dataset"]), + ([split_step], True, ["dataset"]), + ([dataset_step, split_step], True, ["dataset"]), + ], +) +def test_two_steps( + processing_steps_for_valid: List[ProcessingStep], expected_is_valid: bool, expected_valid: List[str] +) -> None: @@ -81 +80,3 @@ def test_splits_and_first_rows_only_errors() -> None: - upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) + config = "config" + split = "split" + upsert_response(kind=dataset_step.cache_kind, dataset=dataset, content={}, http_status=HTTPStatus.OK) @@ -83,6 +84 @@ def test_splits_and_first_rows_only_errors() -> None: - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset, - config="config", - split="split", - content={}, - http_status=HTTPStatus.INTERNAL_SERVER_ERROR, + kind=split_step.cache_kind, dataset=dataset, config=config, split=split, content={}, http_status=HTTPStatus.OK @@ -90,2 +86,2 @@ def test_splits_and_first_rows_only_errors() -> None: - assert get_valid() == [] - assert is_valid("dataset") is False + assert get_valid(processing_steps_for_valid=processing_steps_for_valid) == expected_valid + assert is_valid(dataset=dataset, processing_steps_for_valid=processing_steps_for_valid) is expected_is_valid @@ -94 +90,2 @@ def test_splits_and_first_rows_only_errors() -> None: -def test_valid_datasets() -> None: +def test_errors() -> None: + processing_steps_for_valid = [dataset_step] @@ -98,13 +95,2 @@ def test_valid_datasets() -> None: - upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) - upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset_b, content={}, http_status=HTTPStatus.OK) - upsert_response( - kind=CacheKind.SPLITS.value, dataset=dataset_c, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR - ) - upsert_response( - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset_a, - config="config", - 
split="split", - content={}, - http_status=HTTPStatus.OK, - ) + upsert_response(kind=dataset_step.cache_kind, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=dataset_step.cache_kind, dataset=dataset_b, content={}, http_status=HTTPStatus.OK) @@ -112,6 +98 @@ def test_valid_datasets() -> None: - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset_b, - config="config", - split="split", - content={}, - http_status=HTTPStatus.OK, + kind=dataset_step.cache_kind, dataset=dataset_c, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR @@ -119,4 +100,4 @@ def test_valid_datasets() -> None: - assert get_valid() == [dataset_a, dataset_b] - assert is_valid(dataset_a) is True - assert is_valid(dataset_b) is True - assert is_valid(dataset_c) is False + assert get_valid(processing_steps_for_valid=processing_steps_for_valid) == [dataset_a, dataset_b] + assert is_valid(dataset=dataset_a, processing_steps_for_valid=processing_steps_for_valid) is True + assert is_valid(dataset=dataset_b, processing_steps_for_valid=processing_steps_for_valid) is True + assert is_valid(dataset=dataset_c, processing_steps_for_valid=processing_steps_for_valid) is False diff --git a/services/api/tests/routes/test_webhook.py b/services/api/tests/routes/test_webhook.py new file mode 100644 index 00000000..70726f15 --- /dev/null +++ b/services/api/tests/routes/test_webhook.py @@ -0,0 +1,35 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from typing import Any, Mapping + +import pytest + +from api.routes.webhook import parse_payload + + [email protected]( + "payload,raises", + [ + ({"event": "add", "repo": {"type": "dataset", "name": "webhook-test", "gitalyUid": "123"}}, False), + ( + { + "event": "move", + "movedTo": "webhook-test", + "repo": {"type": "dataset", "name": "previous-name", "gitalyUid": "123"}, + }, + False, + ), + ({"event": "add", "repo": {"type": "dataset", "name": "webhook-test"}}, False), + ({"event": "doesnotexist", "repo": {"type": "dataset", "name": "webhook-test", "gitalyUid": "123"}}, True), + ], +) +def test_parse_payload( + payload: Mapping[str, Any], + raises: bool, +) -> None: + if raises: + with pytest.raises(Exception): + parse_payload(payload) + else: + parse_payload(payload) diff --git a/services/api/tests/test_app.py b/services/api/tests/test_app.py index 1e6a17c3..4267df8c 100644 --- a/services/api/tests/test_app.py +++ b/services/api/tests/test_app.py @@ -4,3 +4 @@ -import json -from http import HTTPStatus -from typing import Dict, Optional +from typing import Mapping, Optional @@ -9,2 +7,3 @@ import pytest -from libcache.simple_cache import _clean_cache_database, upsert_response -from libqueue.queue import Queue, _clean_queue_database +from libcommon.processing_graph import ProcessingStep +from libcommon.queue import _clean_queue_database +from libcommon.simple_cache import _clean_cache_database @@ -15 +14 @@ from api.app import create_app -from api.utils import CacheKind, JobType +from api.config import AppConfig @@ -26 +25 @@ def client(monkeypatch_session: pytest.MonkeyPatch) -> TestClient: -def clean_mongo_databases() -> None: +def clean_mongo_databases(app_config: AppConfig) -> None: @@ -31,4 +30 @@ def clean_mongo_databases() -> None: -splits_queue = Queue(type=JobType.SPLITS.value) - - -def test_cors(client: TestClient) -> None: +def test_cors(client: TestClient, first_dataset_processing_step: ProcessingStep) -> None: @@ -39 +35 @@ def test_cors(client: TestClient) -> None: - "/splits?dataset=dataset1", + 
f"{first_dataset_processing_step.endpoint}?dataset=dataset1", @@ -65,2 +61 @@ def test_get_valid_datasets(client: TestClient) -> None: - json = response.json() - assert "valid" in json + assert "valid" in response.json() @@ -83 +78 @@ def test_is_valid_auth( - headers: Dict[str, str], + headers: Mapping[str, str], @@ -100 +95 @@ def test_get_healthcheck(client: TestClient) -> None: -def test_get_splits(client: TestClient) -> None: +def test_get_step(client: TestClient, first_dataset_processing_step: ProcessingStep) -> None: @@ -102 +97 @@ def test_get_splits(client: TestClient) -> None: - response = client.get("/splits") + response = client.get(first_dataset_processing_step.endpoint) @@ -105 +100 @@ def test_get_splits(client: TestClient) -> None: - response = client.get("/splits?dataset=") + response = client.get(f"{first_dataset_processing_step.endpoint}?dataset=") @@ -109,28 +103,0 @@ def test_get_splits(client: TestClient) -> None: -# caveat: the returned status codes don't simulate the reality -# they're just used to check every case [email protected]( - "headers,status_code,error_code", - [ - ({"Cookie": "some cookie"}, 401, "ExternalUnauthenticatedError"), - ({"Authorization": "Bearer invalid"}, 404, "ExternalAuthenticatedError"), - ({}, 500, "SplitsResponseNotReady"), - ], -) -def test_splits_auth( - client: TestClient, - httpserver: HTTPServer, - hf_auth_path: str, - headers: Dict[str, str], - status_code: int, - error_code: str, -) -> None: - dataset = "dataset-which-does-not-exist" - httpserver.expect_request(hf_auth_path % dataset, headers=headers).respond_with_handler(auth_callback) - httpserver.expect_request(f"/api/datasets/{dataset}").respond_with_data( - json.dumps({}), headers={"X-Error-Code": "RepoNotFound"} - ) - response = client.get(f"/splits?dataset={dataset}", headers=headers) - assert response.status_code == status_code, f"{response.headers}, {response.json()}" - assert response.headers.get("X-Error-Code") == error_code - - @@ -147,15 +113,0 @@ def test_get_first_rows_missing_parameter( - client: TestClient, dataset: Optional[str], config: Optional[str], split: Optional[str] -) -> None: - response = client.get("/first-rows", params={"dataset": dataset, "config": config, "split": split}) - assert response.status_code == 422 - - [email protected]( - "exists,is_private,expected_error_code", - [ - (False, None, "ExternalAuthenticatedError"), - (True, True, "SplitsResponseNotFound"), - (True, False, "SplitsResponseNotReady"), - ], -) -def test_splits_cache_refreshing( @@ -163,44 +115,4 @@ def test_splits_cache_refreshing( - httpserver: HTTPServer, - hf_auth_path: str, - exists: bool, - is_private: Optional[bool], - expected_error_code: str, -) -> None: - dataset = "dataset-to-be-processed" - httpserver.expect_request(hf_auth_path % dataset).respond_with_data(status=200 if exists else 404) - httpserver.expect_request(f"/api/datasets/{dataset}").respond_with_data( - json.dumps({"private": is_private}), headers={} if exists else {"X-Error-Code": "RepoNotFound"} - ) - - response = client.get("/splits", params={"dataset": dataset}) - assert response.headers["X-Error-Code"] == expected_error_code - - if expected_error_code == "SplitsResponseNotReady": - # a subsequent request should return the same error code - response = client.get("/splits", params={"dataset": dataset}) - assert response.headers["X-Error-Code"] == expected_error_code - - # simulate the worker - upsert_response( - kind=CacheKind.SPLITS.value, dataset=dataset, content={"key": "value"}, 
http_status=HTTPStatus.OK - ) - response = client.get("/splits", params={"dataset": dataset}) - assert response.json()["key"] == "value" - assert response.status_code == 200 - - [email protected]( - "exists,is_private,expected_error_code", - [ - (False, None, "ExternalAuthenticatedError"), - (True, True, "FirstRowsResponseNotFound"), - (True, False, "FirstRowsResponseNotReady"), - ], -) -def test_first_rows_cache_refreshing( - client: TestClient, - httpserver: HTTPServer, - hf_auth_path: str, - exists: bool, - is_private: Optional[bool], - expected_error_code: str, + dataset: Optional[str], + config: Optional[str], + split: Optional[str], + first_split_processing_step: ProcessingStep, @@ -208,6 +120,2 @@ def test_first_rows_cache_refreshing( - dataset = "dataset-to-be-processed" - config = "default" - split = "train" - httpserver.expect_request(hf_auth_path % dataset).respond_with_data(status=200 if exists else 404) - httpserver.expect_request(f"/api/datasets/{dataset}").respond_with_data( - json.dumps({"private": is_private}), headers={} if exists else {"X-Error-Code": "RepoNotFound"} + response = client.get( + first_split_processing_step.endpoint, params={"dataset": dataset, "config": config, "split": split} @@ -215,21 +123 @@ def test_first_rows_cache_refreshing( - - response = client.get("/first-rows", params={"dataset": dataset, "config": config, "split": split}) - assert response.headers["X-Error-Code"] == expected_error_code - - if expected_error_code == "FirstRowsResponseNotReady": - # a subsequent request should return the same error code - response = client.get("/first-rows", params={"dataset": dataset, "config": config, "split": split}) - assert response.headers["X-Error-Code"] == expected_error_code - - # simulate the worker - upsert_response( - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset, - config=config, - split=split, - content={"key": "value"}, - http_status=HTTPStatus.OK, - ) - response = client.get("/first-rows", params={"dataset": dataset, "config": config, "split": split}) - assert response.json()["key"] == "value" - assert response.status_code == 200 + assert response.status_code == 422 @@ -249,48 +136,0 @@ def test_metrics(client: TestClient) -> None: - - [email protected]( - "payload,exists_on_the_hub,expected_status,expected_is_updated", - [ - ({"event": "add", "repo": {"type": "dataset", "name": "webhook-test", "gitalyUid": "123"}}, True, 200, True), - ( - { - "event": "move", - "movedTo": "webhook-test", - "repo": {"type": "dataset", "name": "previous-name", "gitalyUid": "123"}, - }, - True, - 200, - True, - ), - ( - {"event": "doesnotexist", "repo": {"type": "dataset", "name": "webhook-test", "gitalyUid": "123"}}, - True, - 400, - False, - ), - ( - {"event": "add", "repo": {"type": "dataset", "name": "webhook-test"}}, - True, - 200, - True, - ), - ({"event": "add", "repo": {"type": "dataset", "name": "webhook-test", "gitalyUid": "123"}}, False, 200, False), - ], -) -def test_webhook( - client: TestClient, - httpserver: HTTPServer, - payload: Dict, - exists_on_the_hub: bool, - expected_status: int, - expected_is_updated: bool, -) -> None: - dataset = "webhook-test" - headers = None if exists_on_the_hub else {"X-Error-Code": "RepoNotFound"} - status = 200 if exists_on_the_hub else 404 - httpserver.expect_request(f"/api/datasets/{dataset}").respond_with_data( - json.dumps({"private": False}), headers=headers, status=status - ) - response = client.post("/webhook", json=payload) - assert response.status_code == expected_status, response.text - assert 
splits_queue.is_job_in_process(dataset=dataset) is expected_is_updated diff --git a/services/api/tests/test_authentication.py b/services/api/tests/test_authentication.py index da07e0c0..3762c81e 100644 --- a/services/api/tests/test_authentication.py +++ b/services/api/tests/test_authentication.py @@ -5 +5 @@ from contextlib import nullcontext as does_not_raise -from typing import Any, Dict +from typing import Any, Mapping @@ -55 +55 @@ def test_external_auth_responses_without_request( -def create_request(headers: Dict[str, str]) -> Request: +def create_request(headers: Mapping[str, str]) -> Request: @@ -82 +82 @@ def test_valid_responses_with_request( - headers: Dict[str, str], + headers: Mapping[str, str], diff --git a/services/api/tests/test_dataset.py b/services/api/tests/test_dataset.py deleted file mode 100644 index f5f7c88e..00000000 --- a/services/api/tests/test_dataset.py +++ /dev/null @@ -1,23 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import json - -import pytest -from pytest_httpserver import HTTPServer - -from api.dataset import is_supported - - [email protected]( - "private,exists,expected", - [(True, False, False), (False, False, True), (True, False, False)], -) -def test_is_supported(httpserver: HTTPServer, hf_endpoint: str, private: bool, exists: bool, expected: bool) -> None: - dataset = "dataset" - endpoint = f"/api/datasets/{dataset}" - hf_token = "dummy_token" - - headers = None if exists else {"X-Error-Code": "RepoNotFound"} - httpserver.expect_request(endpoint).respond_with_data(json.dumps({"private": private}), headers=headers) - assert is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) is expected diff --git a/tools/docker-compose-empty.yml b/tools/docker-compose-empty.yml deleted file mode 100644 index 2fe3bedc..00000000 --- a/tools/docker-compose-empty.yml +++ /dev/null @@ -1 +0,0 @@ -version: "3.9" diff --git a/workers/first_rows/Dockerfile b/workers/first_rows/Dockerfile index f69afcdb..130b4aca 100644 --- a/workers/first_rows/Dockerfile +++ b/workers/first_rows/Dockerfile @@ -26,2 +25,0 @@ WORKDIR /src -COPY libs/libcache/dist ./libs/libcache/dist -COPY libs/libqueue/dist ./libs/libqueue/dist diff --git a/workers/first_rows/Makefile b/workers/first_rows/Makefile index f70a7259..a27aaf5f 100644 --- a/workers/first_rows/Makefile +++ b/workers/first_rows/Makefile @@ -19 +19 @@ pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^libcache @/,+1 d' | sed '/^libcommon @/,+1 d' | sed '/^libqueue @/,+1 d')" + bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^libcommon @/,+1 d')" diff --git a/workers/first_rows/README.md b/workers/first_rows/README.md index 6812892d..7436a5d0 100644 --- a/workers/first_rows/README.md +++ b/workers/first_rows/README.md @@ -29,8 +28,0 @@ If the Hub is not https://huggingface.co (i.e. if you set the `COMMON_HF_ENDPOIN -### Cache - -See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. - -### Queue - -See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. 
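# A self-contained reconstruction (sketch) of the pattern the deleted
# tests/test_dataset.py above used to mock the Hub: the /api/datasets/<name>
# route is stubbed with pytest_httpserver, a missing repo is signalled with an
# "X-Error-Code: RepoNotFound" header, and is_supported() accepts public
# datasets. Only the public-dataset case is shown; whether api.dataset still
# exposes is_supported after this change is an assumption.
import json

from pytest_httpserver import HTTPServer

from api.dataset import is_supported


def test_is_supported_public_dataset(httpserver: HTTPServer, hf_endpoint: str) -> None:
    dataset = "dataset"
    # the mocked Hub answers with the repo metadata; "private": False marks a public dataset
    httpserver.expect_request(f"/api/datasets/{dataset}").respond_with_data(json.dumps({"private": False}))
    assert is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token="dummy_token") is True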
- diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 5e9fd78a..5bf45816 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -770 +770 @@ name = "importlib-metadata" -version = "5.0.0" +version = "5.1.0" @@ -870,19 +869,0 @@ python-versions = ">=3" -[[package]] -name = "libcache" -version = "0.4.3" -description = "Library for the cache in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} - -[package.source] -type = "file" -url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" - @@ -899 +880 @@ name = "libcommon" -version = "0.3.3" +version = "0.5.0" @@ -905,0 +887 @@ python-versions = "==3.9.6" +appdirs = ">=1.4.4,<2.0.0" @@ -907,16 +889 @@ environs = ">=9.5.0,<10.0.0" -orjson = ">=3.6.4,<4.0.0" - -[package.source] -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl" - -[[package]] -name = "libqueue" -version = "0.4.13" -description = "Library for the jobs queue in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -environs = ">=9.5.0,<10.0.0" +huggingface-hub = ">=0.11.0,<0.12.0" @@ -925 +892 @@ mongoengine = ">=0.24.1,<0.25.0" -packaging = ">=21.3,<22.0" +orjson = ">=3.6.4,<4.0.0" @@ -931 +898 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl" @@ -2460 +2427 @@ python-versions = "3.9.6" -content-hash = "6e12ec014e04f388b4a2fdd4af6be9075e9ecf7aa38f49ec9c167e2c2d2ed33f" +content-hash = "152c9759656a3e25a2a16751584007e49545a1a227320371fec492a39e29ae59" @@ -3252,2 +3219,2 @@ importlib-metadata = [ - {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"}, - {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"}, + {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, + {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, @@ -3282,3 +3248,0 @@ kss = [ -libcache = [ - {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, -] @@ -3298,4 +3262 @@ libcommon = [ - {file = "libcommon-0.3.3-py3-none-any.whl", hash = "sha256:b56b6e48bb2c92c3dfc619fc25718db930bc356f1e54fb9f8a67ba4597cb499d"}, -] -libqueue = [ - {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, + {file = "libcommon-0.5.0-py3-none-any.whl", hash = "sha256:0267504716992f562382ff5029ace87444fd12793f2393f3800921d384a0fd52"}, diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index b194f4ad..e8d26e48 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -20,3 +20 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", 
develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/src/first_rows/config.py b/workers/first_rows/src/first_rows/config.py index b9819777..997d773e 100644 --- a/workers/first_rows/src/first_rows/config.py +++ b/workers/first_rows/src/first_rows/config.py @@ -7,3 +7,7 @@ from environs import Env -from libcache.config import CacheConfig -from libcommon.config import CommonConfig -from libqueue.config import QueueConfig +from libcommon.config import ( + CacheConfig, + CommonConfig, + ProcessingGraphConfig, + QueueConfig, + WorkerConfig, +) @@ -29 +33 @@ class FirstRowsConfig: -class WorkerConfig: +class AppConfig: @@ -32,0 +37 @@ class WorkerConfig: + processing_graph: ProcessingGraphConfig @@ -33,0 +39 @@ class WorkerConfig: + worker: WorkerConfig @@ -39,0 +46 @@ class WorkerConfig: + self.processing_graph = ProcessingGraphConfig() @@ -40,0 +48 @@ class WorkerConfig: + self.worker = WorkerConfig() diff --git a/workers/first_rows/src/first_rows/main.py b/workers/first_rows/src/first_rows/main.py index 03828dec..7b154b9d 100644 --- a/workers/first_rows/src/first_rows/main.py +++ b/workers/first_rows/src/first_rows/main.py @@ -4 +4 @@ -from first_rows.config import WorkerConfig +from first_rows.config import AppConfig @@ -8,2 +8,3 @@ if __name__ == "__main__": - worker_config = WorkerConfig() - FirstRowsWorker(worker_config).loop() + app_config = AppConfig() + FIRST_ROWS_ENDPOINT = "/first-rows" + FirstRowsWorker(app_config=app_config, endpoint=FIRST_ROWS_ENDPOINT).loop() diff --git a/workers/first_rows/src/first_rows/response.py b/workers/first_rows/src/first_rows/response.py deleted file mode 100644 index 4b2726fb..00000000 --- a/workers/first_rows/src/first_rows/response.py +++ /dev/null @@ -1,500 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
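# For context on the wiring above (a sketch, not the real libcommon API): the
# endpoint string passed to FirstRowsWorker is resolved through
# app_config.processing_graph.graph.get_step(endpoint), which raises if the
# step is unknown. StepSketch and GraphSketch below are illustrative stand-ins;
# cache_kind and job_type mirror the endpoint, as they do in the tests above.
from typing import Mapping


class StepSketch:
    def __init__(self, endpoint: str) -> None:
        self.endpoint = endpoint
        self.cache_kind = endpoint
        self.job_type = endpoint


class GraphSketch:
    def __init__(self, steps: Mapping[str, StepSketch]) -> None:
        self._steps = steps

    def get_step(self, endpoint: str) -> StepSketch:
        # raises if the step is not found, like the real processing graph
        try:
            return self._steps[endpoint]
        except KeyError as err:
            raise ValueError(f"unknown processing step: {endpoint}") from err


graph = GraphSketch({"/first-rows": StepSketch("/first-rows")})
assert graph.get_step("/first-rows").cache_kind == "/first-rows"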
- -import itertools -import logging -from typing import Any, Dict, List, Optional, TypedDict, Union - -from datasets import ( - Dataset, - Features, - IterableDataset, - get_dataset_config_info, - get_dataset_config_names, - get_dataset_split_names, - load_dataset, -) -from datasets.data_files import EmptyDatasetError as _EmptyDatasetError -from huggingface_hub.hf_api import HfApi, RepositoryNotFoundError -from libcommon.utils import orjson_dumps - -from first_rows.features import get_cell_value -from first_rows.utils import ( - ConfigNotFoundError, - DatasetNotFoundError, - EmptyDatasetError, - FeaturesError, - InfoError, - NormalRowsError, - RowsPostProcessingError, - SplitNotFoundError, - SplitsNamesError, - StreamingRowsError, - retry, -) - -Row = Dict[str, Any] - - -class FeatureItem(TypedDict): - feature_idx: int - name: str - type: Dict[str, Any] - - -class RowItem(TypedDict): - row_idx: int - row: Dict[str, Any] - truncated_cells: List[str] - - -class FirstRowsResponse(TypedDict): - dataset: str - config: str - split: str - features: List[FeatureItem] - rows: List[RowItem] - - -class FirstRowsResponseResult(TypedDict): - first_rows_response: FirstRowsResponse - dataset_git_revision: Optional[str] - - -@retry() -def get_rows( - dataset: str, - config: str, - split: str, - streaming: bool, - rows_max_number: int, - use_auth_token: Union[bool, str, None] = False, -) -> List[Row]: - ds = load_dataset( - dataset, - name=config, - split=split, - streaming=streaming, - use_auth_token=use_auth_token, - ) - if streaming: - if not isinstance(ds, IterableDataset): - raise TypeError("load_dataset should return an IterableDataset in streaming mode") - elif not isinstance(ds, Dataset): - raise TypeError("load_dataset should return a Dataset in normal mode") - rows_plus_one = list(itertools.islice(ds, rows_max_number + 1)) - # ^^ to be able to detect if a split has exactly ROWS_MAX_NUMBER rows - if len(rows_plus_one) <= rows_max_number: - logging.debug(f"all the rows in the split have been fetched ({len(rows_plus_one)})") - else: - logging.debug(f"the rows in the split have been truncated ({rows_max_number} rows)") - return rows_plus_one[:rows_max_number] - - -def get_json_size(obj: Any) -> int: - """Returns the size of an object in bytes once serialized as JSON - - Args: - obj (Any): the Python object - - Returns: - int: the size of the serialized object in bytes - """ - return len(orjson_dumps(obj)) - - -# from https://stackoverflow.com/a/43848928/7351594 -def utf8_lead_byte(b: int) -> bool: - """A UTF-8 intermediate byte starts with the bits 10xxxxxx.""" - return (b & 0xC0) != 0x80 - - -def utf8_byte_truncate(text: str, max_bytes: int) -> str: - """If text[max_bytes] is not a lead byte, back up until a lead byte is - found and truncate before that character.""" - utf8 = text.encode("utf8") - if len(utf8) <= max_bytes: - return text - i = max_bytes - while i > 0 and not utf8_lead_byte(utf8[i]): - i -= 1 - return utf8[:i].decode("utf8", "ignore") - - -# Mutates row_item, and returns it anyway -def truncate_row_item(row_item: RowItem, min_cell_bytes: int) -> RowItem: - row = {} - for column_name, cell in row_item["row"].items(): - # for now: all the cells above min_cell_bytes are truncated to min_cell_bytes - # it's done by replacing the cell (which can have any type) by a string with - # its JSON serialization, and then truncating it to min_cell_bytes - cell_json = orjson_dumps(cell) - if len(cell_json) <= min_cell_bytes: - row[column_name] = cell - else: - cell_json_str = 
cell_json.decode("utf8", "ignore") - row_item["truncated_cells"].append(column_name) - row[column_name] = utf8_byte_truncate(text=cell_json_str, max_bytes=min_cell_bytes) - row_item["row"] = row - return row_item - - -COMMA_SIZE = 1 # the comma "," is encoded with one byte in utf-8 - - -# Mutates row_items, and returns them anyway -def truncate_row_items(row_items: List[RowItem], min_cell_bytes: int, rows_max_bytes: int) -> List[RowItem]: - # compute the current size - rows_bytes = sum(get_json_size(row_item) for row_item in row_items) + COMMA_SIZE * (len(row_items) - 1) - - # Loop backwards, so that the last rows are truncated first - for row_item in reversed(row_items): - if rows_bytes < rows_max_bytes: - break - previous_size = get_json_size(row_item) + COMMA_SIZE - row_item = truncate_row_item(row_item=row_item, min_cell_bytes=min_cell_bytes) - new_size = get_json_size(row_item) + COMMA_SIZE - rows_bytes += new_size - previous_size - row_idx = row_item["row_idx"] - logging.debug(f"the size of the rows is now ({rows_bytes}) after truncating row idx={row_idx}") - return row_items - - -def to_row_item(dataset: str, config: str, split: str, row_idx: int, row: Row) -> RowItem: - return { - "row_idx": row_idx, - "row": row, - "truncated_cells": [], - } - - -def create_truncated_row_items( - dataset: str, - config: str, - split: str, - rows: List[Row], - min_cell_bytes: int, - rows_max_bytes: int, - rows_min_number: int, -) -> List[RowItem]: - row_items = [] - rows_bytes = 0 - - # two restrictions must be enforced: - # - at least rows_min_number rows - # - at most rows_max_bytes bytes. Note that it's the limit to the sum of the rows sizes. The JSON response size - # will be greater, due to the other fields (row_idx, truncated_cells, features, etc.). - # To enforce this: - # 1. first get the first rows_min_number rows - for row_idx, row in enumerate(rows[:rows_min_number]): - row_item = to_row_item(dataset, config, split, row_idx, row) - rows_bytes += get_json_size(row_item) + COMMA_SIZE - row_items.append(row_item) - - # 2. if the total is over the bytes limit, truncate the values, iterating backwards starting - # from the last rows, until getting under the threshold - # caveat: the truncation might not be enough to get under the threshold if: - # - the number of columns is too high - # - rows_max_bytes is too low (or even negative) - if rows_bytes >= rows_max_bytes: - logging.debug( - f"the size of the first {rows_min_number} rows ({rows_bytes}) is above the max number of bytes" - f" ({rows_max_bytes}), they will be truncated" - ) - return truncate_row_items(row_items=row_items, min_cell_bytes=min_cell_bytes, rows_max_bytes=rows_max_bytes) - - # 3. 
else: add the remaining rows until the end, or until the bytes threshold - for idx, row in enumerate(rows[rows_min_number:]): - row_idx = rows_min_number + idx - row_item = to_row_item(dataset, config, split, row_idx, row) - rows_bytes += get_json_size(row_item) + COMMA_SIZE - if rows_bytes >= rows_max_bytes: - logging.debug( - f"the rows in the split have been truncated to {row_idx} row(s) to keep the size" - f" ({rows_bytes}) under the limit ({rows_max_bytes})" - ) - break - row_items.append(row_item) - return row_items - - -def transform_rows( - dataset: str, - config: str, - split: str, - rows: List[Row], - features: Features, - assets_base_url: str, - assets_directory: str, -) -> List[Row]: - return [ - { - featureName: get_cell_value( - dataset=dataset, - config=config, - split=split, - row_idx=row_idx, - cell=row[featureName] if featureName in row else None, - featureName=featureName, - fieldType=fieldType, - assets_base_url=assets_base_url, - assets_directory=assets_directory, - ) - for (featureName, fieldType) in features.items() - } - for row_idx, row in enumerate(rows) - ] - - -# in JSON, dicts do not carry any order, so we need to return a list -# -# > An object is an *unordered* collection of zero or more name/value pairs, where a name is a string and a value -# is a string, number, boolean, null, object, or array. -# > An array is an *ordered* sequence of zero or more values. -# > The terms "object" and "array" come from the conventions of JavaScript. -# from https://stackoverflow.com/a/7214312/7351594 / https://www.rfc-editor.org/rfc/rfc7159.html -def to_features_list(dataset: str, config: str, split: str, features: Features) -> List[FeatureItem]: - features_dict = features.to_dict() - return [ - { - "feature_idx": idx, - "name": name, - "type": features_dict[name], - } - for idx, name in enumerate(features) - ] - - -class SplitFullName(TypedDict): - dataset: str - config: str - split: str - - -def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, None] = False) -> List[SplitFullName]: - logging.info(f"get dataset '{dataset}' split full names") - return [ - {"dataset": dataset, "config": config, "split": split} - for config in get_dataset_config_names(path=dataset, use_auth_token=use_auth_token) - for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token) - ] - - -def get_dataset_git_revision( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, -) -> Union[str, None]: - """ - Get the git revision of the dataset. - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, *optional*): - An authentication token (See https://huggingface.co/settings/token) - Returns: - `Union[str, None]`: the dataset git revision (sha) if any. - <Tip> - Raises the following errors: - - [`~worker.exceptions.DatasetNotFoundError`] - If the repository to download from cannot be found. This may be because it doesn't exist, - or because it is set to `private` and you do not have access. 
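# Worked example (sketch) of the ordering rationale above: a Features mapping
# is flattened into an index-ordered list so the JSON response preserves the
# column order. The feature names and types here are illustrative.
from datasets import Features, Value

features = Features({"id": Value("int64"), "text": Value("string")})
features_dict = features.to_dict()
features_list = [
    {"feature_idx": idx, "name": name, "type": features_dict[name]}
    for idx, name in enumerate(features)
]
assert [item["name"] for item in features_list] == ["id", "text"]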
- </Tip> - """ - try: - dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token) - except RepositoryNotFoundError as err: - raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err - return dataset_info.sha - - -def compute_first_rows_response( - dataset: str, - config: str, - split: str, - assets_base_url: str, - hf_endpoint: str, - hf_token: Optional[str], - min_cell_bytes: int, - max_size_fallback: int, - rows_max_bytes: int, - rows_max_number: int, - rows_min_number: int, - assets_directory: str, -) -> FirstRowsResponseResult: - """ - Get the response of /first-rows for one specific split of a dataset from huggingface.co. - Dataset can be private or gated if you pass an acceptable token. - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - config (`str`): - A configuration name. - split (`str`): - A split name. - assets_base_url (`str`): - The base url of the assets. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str` or `None`): - An authentication token (See https://huggingface.co/settings/token) - max_size_fallback (`int`): - The maximum number of bytes of the split to fallback to normal mode if the streaming mode fails. - rows_max_bytes (`int`): - The maximum number of bytes of the response (else, the response is truncated). - rows_max_number (`int`): - The maximum number of rows of the response. - rows_min_number (`int`): - The minimum number of rows of the response. - Returns: - [`FirstRowsResponse`]: The list of first rows of the split. - <Tip> - Raises the following errors: - - [`~worker.exceptions.DatasetNotFoundError`] - If the repository to download from cannot be found. This may be because it doesn't exist, - or because it is set to `private` and you do not have access. - - [`~worker.exceptions.ConfigNotFoundError`] - If the config does not exist in the dataset. - - [`~worker.exceptions.SplitNotFoundError`] - If the split does not exist in the dataset. - - [`~worker.utils.InfoError`] - If the config info could not be obtained using the datasets library. - - [`~worker.utils.FeaturesError`] - If the split features could not be obtained using the datasets library. - - [`~worker.utils.StreamingRowsError`] - If the split rows could not be obtained using the datasets library in streaming mode. - - [`~worker.utils.NormalRowsError`] - If the split rows could not be obtained using the datasets library in normal mode. - - [`~worker.utils.RowsPostProcessingError`] - If the post-processing of the split rows failed, e.g. while saving the images or audio files to the assets. - </Tip> - """ - logging.info(f"get first-rows for dataset={dataset} config={config} split={split}") - use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False - # first ensure the tuple (dataset, config, split) exists on the Hub - # try to get the dataset config info. 
It raises if the dataset does not exist or is private - dataset_git_revision = get_dataset_git_revision(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) - # get the list of splits - try: - split_full_names = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) - except _EmptyDatasetError as err: - raise EmptyDatasetError("The dataset is empty.", cause=err) from err - except Exception as err: - raise SplitsNamesError("Cannot get the split names for the dataset.", cause=err) from err - # ^ can raise DatasetNotFoundError or SplitsNamesError - if config not in [split_full_name["config"] for split_full_name in split_full_names]: - raise ConfigNotFoundError(f"config {config} does not exist for dataset {dataset}") - if {"dataset": dataset, "config": config, "split": split} not in [ - { - "dataset": split_full_name["dataset"], - "config": split_full_name["config"], - "split": split_full_name["split"], - } - for split_full_name in split_full_names - ]: - raise SplitNotFoundError("The config or the split does not exist in the dataset") - # get the features - try: - info = get_dataset_config_info( - path=dataset, - config_name=config, - use_auth_token=use_auth_token, - ) - except Exception as err: - raise InfoError("The info cannot be fetched for the dataset config.", cause=err) from err - if not info.features: - try: - # https://github.com/huggingface/datasets/blob/f5826eff9b06ab10dba1adfa52543341ef1e6009/src/datasets/iterable_dataset.py#L1255 - iterable_dataset = load_dataset( - path=dataset, - name=config, - split=split, - streaming=True, - use_auth_token=use_auth_token, - ) - if not isinstance(iterable_dataset, IterableDataset): - raise TypeError("load_dataset should return an IterableDataset") - iterable_dataset = iterable_dataset._resolve_features() - if not isinstance(iterable_dataset, IterableDataset): - raise TypeError("load_dataset should return an IterableDataset") - features = iterable_dataset.features - except Exception as err: - raise FeaturesError("The split features (columns) cannot be extracted.", cause=err) from err - else: - features = info.features - # get the rows - try: - rows = get_rows( - dataset=dataset, - config=config, - split=split, - streaming=True, - rows_max_number=rows_max_number, - use_auth_token=use_auth_token, - ) - except Exception as err: - if info.size_in_bytes is None or info.size_in_bytes > max_size_fallback: - raise StreamingRowsError( - "Cannot load the dataset split (in streaming mode) to extract the first rows.", - cause=err, - ) from err - try: - rows = get_rows( - dataset=dataset, - config=config, - split=split, - streaming=False, - rows_max_number=rows_max_number, - use_auth_token=use_auth_token, - ) - except Exception as err: - raise NormalRowsError( - "Cannot load the dataset split (in normal download mode) to extract the first rows.", - cause=err, - ) from err - # transform the rows, if needed (e.g. save the images or audio to the assets, and return their URL) - try: - transformed_rows = transform_rows( - dataset=dataset, - config=config, - split=split, - rows=rows, - features=features, - assets_base_url=assets_base_url, - assets_directory=assets_directory, - ) - except Exception as err: - raise RowsPostProcessingError( - "Server error while post-processing the split rows. 
Please report the issue.", - cause=err, - ) from err - # get the size of the surrounding JSON (without the rows) - features_list = to_features_list(dataset=dataset, config=config, split=split, features=features) - response: FirstRowsResponse = { - "dataset": dataset, - "config": config, - "split": split, - "features": features_list, - "rows": [], - } - surrounding_json_size = get_json_size(response) - # truncate the rows to fit within the restrictions, and prepare them as RowItems - row_items = create_truncated_row_items( - dataset=dataset, - config=config, - split=split, - rows=transformed_rows, - min_cell_bytes=min_cell_bytes, - rows_max_bytes=rows_max_bytes - surrounding_json_size, - rows_min_number=rows_min_number, - ) - response["rows"] = row_items - # return the response - return { - "first_rows_response": response, - "dataset_git_revision": dataset_git_revision, - } diff --git a/workers/first_rows/src/first_rows/utils.py b/workers/first_rows/src/first_rows/utils.py deleted file mode 100644 index 202b9ea5..00000000 --- a/workers/first_rows/src/first_rows/utils.py +++ /dev/null @@ -1,164 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import functools -import logging -import time -from enum import Enum -from http import HTTPStatus -from typing import Literal, Optional - -from libcommon.exceptions import CustomError -from libqueue.queue import Queue - -WorkerErrorCode = Literal[ - "DatasetNotFoundError", - "ConfigNotFoundError", - "SplitNotFoundError", - "SplitsNamesError", - "EmptyDatasetError", - "InfoError", - "FeaturesError", - "StreamingRowsError", - "NormalRowsError", - "RowsPostProcessingError", - "UnexpectedError", -] - - -class WorkerCustomError(CustomError): - """Base class for exceptions in this module.""" - - def __init__( - self, - message: str, - status_code: HTTPStatus, - code: WorkerErrorCode, - cause: Optional[BaseException] = None, - disclose_cause: bool = False, - ): - super().__init__(message, status_code, str(code), cause, disclose_cause) - - -class DatasetNotFoundError(WorkerCustomError): - """Raised when the dataset does not exist.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.NOT_FOUND, "DatasetNotFoundError", cause, False) - - -class ConfigNotFoundError(WorkerCustomError): - """Raised when the config does not exist.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.NOT_FOUND, "ConfigNotFoundError", cause, False) - - -class SplitNotFoundError(WorkerCustomError): - """Raised when the split does not exist.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.NOT_FOUND, "SplitNotFoundError", cause, False) - - -class SplitsNamesError(WorkerCustomError): - """Raised when the split names could not be fetched.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "SplitsNamesError", cause, True) - - -class EmptyDatasetError(WorkerCustomError): - """Raised when the dataset has no data.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) - - -class InfoError(WorkerCustomError): - """Raised when the info could not be fetched.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - 
super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "InfoError", cause, True) - - -class FeaturesError(WorkerCustomError): - """Raised when the features could not be fetched.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "FeaturesError", cause, True) - - -class StreamingRowsError(WorkerCustomError): - """Raised when the rows could not be fetched in streaming mode.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "StreamingRowsError", cause, True) - - -class NormalRowsError(WorkerCustomError): - """Raised when the rows could not be fetched in normal mode.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "NormalRowsError", cause, True) - - -class RowsPostProcessingError(WorkerCustomError): - """Raised when the rows could not be post-processed successfully.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "RowsPostProcessingError", cause, False) - - -class UnexpectedError(WorkerCustomError): - """Raised when the response for the split has not been found.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "UnexpectedError", cause, False) - - -def retry(): - def decorator_retry(func): - """retries with an increasing sleep before every attempt""" - SLEEPS = [1, 7, 70, 7 * 60, 70 * 60] - MAX_ATTEMPTS = len(SLEEPS) - - @functools.wraps(func) - def decorator(*args, **kwargs): - attempt = 0 - last_err = None - while attempt < MAX_ATTEMPTS: - try: - """always sleep before calling the function. It will prevent rate limiting in the first place""" - duration = SLEEPS[attempt] - logging.info(f"Sleep during {duration} seconds to preventively mitigate rate limiting.") - time.sleep(duration) - return func(*args, **kwargs) - except ConnectionError as err: - logging.info("Got a ConnectionError, possibly due to rate limiting. 
Let's retry.") - last_err = err - attempt += 1 - raise RuntimeError(f"Give up after {attempt} attempts with ConnectionError") from last_err - - return decorator - - return decorator_retry - - -class JobType(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" - - -class Queues: - splits: Queue - first_rows: Queue - - def __init__(self, max_jobs_per_namespace: Optional[int] = None): - self.splits = Queue(type=JobType.SPLITS.value, max_jobs_per_namespace=max_jobs_per_namespace) - self.first_rows = Queue(type=JobType.FIRST_ROWS.value, max_jobs_per_namespace=max_jobs_per_namespace) - - -class CacheKind(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/first_rows/src/first_rows/worker.py index bed2925e..39cf2b01 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/first_rows/src/first_rows/worker.py @@ -3,0 +4 @@ +import functools @@ -4,0 +6 @@ import importlib.metadata +import itertools @@ -5,0 +8 @@ import logging +import time @@ -7,15 +10,10 @@ from http import HTTPStatus -from typing import Optional - -from libcache.simple_cache import get_response_without_content, upsert_response -from libqueue.worker import Worker - -from first_rows.config import WorkerConfig -from first_rows.response import compute_first_rows_response, get_dataset_git_revision -from first_rows.utils import ( - CacheKind, - ConfigNotFoundError, - DatasetNotFoundError, - Queues, - SplitNotFoundError, - UnexpectedError, - WorkerCustomError, +from typing import Any, List, Literal, Mapping, Optional, TypedDict, Union + +from datasets import ( + Dataset, + Features, + IterableDataset, + get_dataset_config_info, + get_dataset_config_names, + get_dataset_split_names, + load_dataset, @@ -22,0 +21,4 @@ from first_rows.utils import ( +from datasets.data_files import EmptyDatasetError as _EmptyDatasetError +from libcommon.exceptions import CustomError +from libcommon.utils import orjson_dumps +from libcommon.worker import ConfigNotFoundError, SplitNotFoundError, Worker @@ -23,0 +26,2 @@ from first_rows.utils import ( +from first_rows.config import AppConfig, CacheConfig, FirstRowsConfig +from first_rows.features import get_cell_value @@ -25,52 +29,9 @@ from first_rows.utils import ( -class FirstRowsWorker(Worker): - config: WorkerConfig - - def __init__(self, worker_config: WorkerConfig): - super().__init__(queue_config=worker_config.queue, version=importlib.metadata.version(__package__)) - self._queues = Queues(max_jobs_per_namespace=worker_config.queue.max_jobs_per_namespace) - self.config = worker_config - - @property - def queue(self): - return self._queues.first_rows - - def should_skip_job( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> bool: - """Return True if the job should be skipped, False otherwise. - - The job must be skipped if: - - force is False - - and a cache entry exists for the dataset - - and the result was successful - - and it has been created with the same major version of the worker - - and it has been created with the exact same git commit of the dataset repository - - Args: - dataset (:obj:`str`): The name of the dataset. - config (:obj:`str`, `optional`): The name of the configuration. - split (:obj:`str`, `optional`): The name of the split. - force (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to force the job to be run. - - Returns: - :obj:`bool`: True if the job should be skipped, False otherwise. 
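# Usage sketch for the retry() decorator defined above: every attempt is
# preceded by an increasing sleep (1s, 7s, 70s, 7min, 70min), only
# ConnectionError triggers another attempt, and after the last attempt a
# RuntimeError is raised from the last ConnectionError. fetch_from_hub is a
# hypothetical function, shown for illustration only.
@retry()
def fetch_from_hub() -> None:
    # e.g. a load_dataset(...) call: a ConnectionError raised here (typically
    # rate limiting) is retried, any other exception propagates immediately
    ...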
- """ - if force or config is None or split is None: - return False - try: - cached_response = get_response_without_content( - kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split - ) - dataset_git_revision = get_dataset_git_revision( - dataset=dataset, hf_endpoint=self.config.common.hf_endpoint, hf_token=self.config.common.hf_token - ) - return ( - # TODO: use "error_code" to decide if the job should be skipped (ex: retry if temporary error) - cached_response["http_status"] == HTTPStatus.OK - and cached_response["worker_version"] is not None - and self.compare_major_version(cached_response["worker_version"]) == 0 - and cached_response["dataset_git_revision"] is not None - and cached_response["dataset_git_revision"] == dataset_git_revision - ) - except Exception: - return False +FirstRowsWorkerErrorCode = Literal[ + "SplitsNamesError", + "EmptyDatasetError", + "InfoError", + "FeaturesError", + "StreamingRowsError", + "NormalRowsError", + "RowsPostProcessingError", +] @@ -78 +39,5 @@ class FirstRowsWorker(Worker): - def compute( + +class FirstRowsWorkerError(CustomError): + """Base class for exceptions in this module.""" + + def __init__( @@ -80,35 +45,254 @@ class FirstRowsWorker(Worker): - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, - force: bool = False, - ) -> bool: - if config is None or split is None: - raise ValueError("config and split are required") - try: - result = compute_first_rows_response( - dataset=dataset, - config=config, - split=split, - assets_base_url=self.config.common.assets_base_url, - hf_endpoint=self.config.common.hf_endpoint, - hf_token=self.config.common.hf_token, - min_cell_bytes=self.config.first_rows.min_cell_bytes, - max_size_fallback=self.config.first_rows.fallback_max_dataset_size, - rows_max_bytes=self.config.first_rows.max_bytes, - rows_max_number=self.config.first_rows.max_number, - rows_min_number=self.config.first_rows.min_number, - assets_directory=self.config.cache.assets_directory, - ) - upsert_response( - kind=CacheKind.FIRST_ROWS.value, - dataset=dataset, - config=config, - split=split, - content=dict(result["first_rows_response"]), - http_status=HTTPStatus.OK, - worker_version=self.version, - dataset_git_revision=result["dataset_git_revision"], - ) - logging.debug(f"dataset={dataset} config={config} split={split} is valid, cache updated") - return True - except (DatasetNotFoundError, ConfigNotFoundError, SplitNotFoundError): + message: str, + status_code: HTTPStatus, + code: FirstRowsWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__(message, status_code, str(code), cause, disclose_cause) + + +class SplitsNamesError(FirstRowsWorkerError): + """Raised when the split names could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "SplitsNamesError", cause, True) + + +class EmptyDatasetError(FirstRowsWorkerError): + """Raised when the dataset has no data.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) + + +class InfoError(FirstRowsWorkerError): + """Raised when the info could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "InfoError", cause, True) + + +class 
FeaturesError(FirstRowsWorkerError): + """Raised when the features could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "FeaturesError", cause, True) + + +class StreamingRowsError(FirstRowsWorkerError): + """Raised when the rows could not be fetched in streaming mode.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "StreamingRowsError", cause, True) + + +class NormalRowsError(FirstRowsWorkerError): + """Raised when the rows could not be fetched in normal mode.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "NormalRowsError", cause, True) + + +class RowsPostProcessingError(FirstRowsWorkerError): + """Raised when the rows could not be post-processed successfully.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "RowsPostProcessingError", cause, False) + + +def retry(): + def decorator_retry(func): + """retries with an increasing sleep before every attempt""" + SLEEPS = [1, 7, 70, 7 * 60, 70 * 60] + MAX_ATTEMPTS = len(SLEEPS) + + @functools.wraps(func) + def decorator(*args, **kwargs): + attempt = 0 + last_err = None + while attempt < MAX_ATTEMPTS: + try: + """always sleep before calling the function. It will prevent rate limiting in the first place""" + duration = SLEEPS[attempt] + logging.info(f"Sleep during {duration} seconds to preventively mitigate rate limiting.") + time.sleep(duration) + return func(*args, **kwargs) + except ConnectionError as err: + logging.info("Got a ConnectionError, possibly due to rate limiting. 
Let's retry.") + last_err = err + attempt += 1 + raise RuntimeError(f"Give up after {attempt} attempts with ConnectionError") from last_err + + return decorator + + return decorator_retry + + +Row = Mapping[str, Any] + + +class FeatureItem(TypedDict): + feature_idx: int + name: str + type: Mapping[str, Any] + + +class RowItem(TypedDict): + row_idx: int + row: Mapping[str, Any] + truncated_cells: List[str] + + +class FirstRowsResponse(TypedDict): + dataset: str + config: str + split: str + features: List[FeatureItem] + rows: List[RowItem] + + +@retry() +def get_rows( + dataset: str, + config: str, + split: str, + streaming: bool, + rows_max_number: int, + use_auth_token: Union[bool, str, None] = False, +) -> List[Row]: + ds = load_dataset( + dataset, + name=config, + split=split, + streaming=streaming, + use_auth_token=use_auth_token, + ) + if streaming: + if not isinstance(ds, IterableDataset): + raise TypeError("load_dataset should return an IterableDataset in streaming mode") + elif not isinstance(ds, Dataset): + raise TypeError("load_dataset should return a Dataset in normal mode") + rows_plus_one = list(itertools.islice(ds, rows_max_number + 1)) + # ^^ to be able to detect if a split has exactly ROWS_MAX_NUMBER rows + if len(rows_plus_one) <= rows_max_number: + logging.debug(f"all the rows in the split have been fetched ({len(rows_plus_one)})") + else: + logging.debug(f"the rows in the split have been truncated ({rows_max_number} rows)") + return rows_plus_one[:rows_max_number] + + +def get_json_size(obj: Any) -> int: + """Returns the size of an object in bytes once serialized as JSON + + Args: + obj (Any): the Python object + + Returns: + int: the size of the serialized object in bytes + """ + return len(orjson_dumps(obj)) + + +# from https://stackoverflow.com/a/43848928/7351594 +def utf8_lead_byte(b: int) -> bool: + """A UTF-8 intermediate byte starts with the bits 10xxxxxx.""" + return (b & 0xC0) != 0x80 + + +def utf8_byte_truncate(text: str, max_bytes: int) -> str: + """If text[max_bytes] is not a lead byte, back up until a lead byte is + found and truncate before that character.""" + utf8 = text.encode("utf8") + if len(utf8) <= max_bytes: + return text + i = max_bytes + while i > 0 and not utf8_lead_byte(utf8[i]): + i -= 1 + return utf8[:i].decode("utf8", "ignore") + + +# Mutates row_item, and returns it anyway +def truncate_row_item(row_item: RowItem, min_cell_bytes: int) -> RowItem: + row = {} + for column_name, cell in row_item["row"].items(): + # for now: all the cells above min_cell_bytes are truncated to min_cell_bytes + # it's done by replacing the cell (which can have any type) by a string with + # its JSON serialization, and then truncating it to min_cell_bytes + cell_json = orjson_dumps(cell) + if len(cell_json) <= min_cell_bytes: + row[column_name] = cell + else: + cell_json_str = cell_json.decode("utf8", "ignore") + row_item["truncated_cells"].append(column_name) + row[column_name] = utf8_byte_truncate(text=cell_json_str, max_bytes=min_cell_bytes) + row_item["row"] = row + return row_item + + +COMMA_SIZE = 1 # the comma "," is encoded with one byte in utf-8 + + +# Mutates row_items, and returns them anyway +def truncate_row_items(row_items: List[RowItem], min_cell_bytes: int, rows_max_bytes: int) -> List[RowItem]: + # compute the current size + rows_bytes = sum(get_json_size(row_item) for row_item in row_items) + COMMA_SIZE * (len(row_items) - 1) + + # Loop backwards, so that the last rows are truncated first + for row_item in reversed(row_items): + if rows_bytes < 
rows_max_bytes: + break + previous_size = get_json_size(row_item) + COMMA_SIZE + row_item = truncate_row_item(row_item=row_item, min_cell_bytes=min_cell_bytes) + new_size = get_json_size(row_item) + COMMA_SIZE + rows_bytes += new_size - previous_size + row_idx = row_item["row_idx"] + logging.debug(f"the size of the rows is now ({rows_bytes}) after truncating row idx={row_idx}") + return row_items + + +def to_row_item(row_idx: int, row: Row) -> RowItem: + return { + "row_idx": row_idx, + "row": row, + "truncated_cells": [], + } + + +def create_truncated_row_items( + rows: List[Row], + min_cell_bytes: int, + rows_max_bytes: int, + rows_min_number: int, +) -> List[RowItem]: + row_items = [] + rows_bytes = 0 + + # two restrictions must be enforced: + # - at least rows_min_number rows + # - at most rows_max_bytes bytes. Note that it's the limit to the sum of the rows sizes. The JSON response size + # will be greater, due to the other fields (row_idx, truncated_cells, features, etc.). + # To enforce this: + # 1. first get the first rows_min_number rows + for row_idx, row in enumerate(rows[:rows_min_number]): + row_item = to_row_item(row_idx=row_idx, row=row) + rows_bytes += get_json_size(row_item) + COMMA_SIZE + row_items.append(row_item) + + # 2. if the total is over the bytes limit, truncate the values, iterating backwards starting + # from the last rows, until getting under the threshold + # caveat: the truncation might not be enough to get under the threshold if: + # - the number of columns is too high + # - rows_max_bytes is too low (or even negative) + if rows_bytes >= rows_max_bytes: + logging.debug( + f"the size of the first {rows_min_number} rows ({rows_bytes}) is above the max number of bytes" + f" ({rows_max_bytes}), they will be truncated" + ) + return truncate_row_items(row_items=row_items, min_cell_bytes=min_cell_bytes, rows_max_bytes=rows_max_bytes) + + # 3. 
else: add the remaining rows until the end, or until the bytes threshold + for idx, row in enumerate(rows[rows_min_number:]): + row_idx = rows_min_number + idx + row_item = to_row_item(row_idx=row_idx, row=row) + rows_bytes += get_json_size(row_item) + COMMA_SIZE + if rows_bytes >= rows_max_bytes: @@ -116 +300,2 @@ class FirstRowsWorker(Worker): - f"the dataset={dataset}, config {config} or split {split} could not be found, don't update the cache" + f"the rows in the split have been truncated to {row_idx} row(s) to keep the size" + f" ({rows_bytes}) under the limit ({rows_max_bytes})" @@ -118,4 +303,17 @@ class FirstRowsWorker(Worker): - return False - except WorkerCustomError as err: - upsert_response( - kind=CacheKind.FIRST_ROWS.value, + break + row_items.append(row_item) + return row_items + + +def transform_rows( + dataset: str, + config: str, + split: str, + rows: List[Row], + features: Features, + assets_base_url: str, + assets_directory: str, +) -> List[Row]: + return [ + { + featureName: get_cell_value( @@ -125,4 +323,6 @@ class FirstRowsWorker(Worker): - content=dict(err.as_response()), - http_status=err.status_code, - error_code=err.code, - details=dict(err.as_response_with_cause()), + row_idx=row_idx, + cell=row[featureName] if featureName in row else None, + featureName=featureName, + fieldType=fieldType, + assets_base_url=assets_base_url, + assets_directory=assets_directory, @@ -130,2 +330,141 @@ class FirstRowsWorker(Worker): - logging.debug( - f"first-rows response for dataset={dataset} config={config} split={split} had an error, cache updated" + for (featureName, fieldType) in features.items() + } + for row_idx, row in enumerate(rows) + ] + + +# in JSON, dicts do not carry any order, so we need to return a list +# +# > An object is an *unordered* collection of zero or more name/value pairs, where a name is a string and a value +# is a string, number, boolean, null, object, or array. +# > An array is an *ordered* sequence of zero or more values. +# > The terms "object" and "array" come from the conventions of JavaScript. +# from https://stackoverflow.com/a/7214312/7351594 / https://www.rfc-editor.org/rfc/rfc7159.html +def to_features_list(features: Features) -> List[FeatureItem]: + features_dict = features.to_dict() + return [ + { + "feature_idx": idx, + "name": name, + "type": features_dict[name], + } + for idx, name in enumerate(features) + ] + + +class SplitFullName(TypedDict): + dataset: str + config: str + split: str + + +def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, None] = False) -> List[SplitFullName]: + logging.info(f"get dataset '{dataset}' split full names") + return [ + {"dataset": dataset, "config": config, "split": split} + for config in get_dataset_config_names(path=dataset, use_auth_token=use_auth_token) + for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token) + ] + + +def compute_first_rows_response( + dataset: str, + config: str, + split: str, + assets_base_url: str, + hf_token: Optional[str], + min_cell_bytes: int, + max_size_fallback: int, + rows_max_bytes: int, + rows_max_number: int, + rows_min_number: int, + assets_directory: str, +) -> FirstRowsResponse: + """ + Get the response of /first-rows for one specific split of a dataset from huggingface.co. + Dataset can be private or gated if you pass an acceptable token. + + It is assumed that the dataset exists and can be accessed using the token.
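# Worked example (sketch) of the byte-budget arithmetic this function applies
# below: the budget for rows is rows_max_bytes minus the size of the
# surrounding JSON (dataset/config/split/features), and each row "costs" its
# serialized size plus COMMA_SIZE. All numbers are made up for illustration.
rows_max_bytes = 1_000_000
surrounding_json_size = 12_345  # get_json_size(response) with an empty "rows" list
budget = rows_max_bytes - surrounding_json_size
row_sizes = [40_000, 40_000, 950_000]  # get_json_size(row_item) for each row
COMMA_SIZE = 1
rows_bytes = 0
kept = 0
for size in row_sizes:
    rows_bytes += size + COMMA_SIZE
    if rows_bytes >= budget:
        break  # later rows are dropped; the first rows_min_number ones get truncated instead
    kept += 1
assert kept == 2  # the third row would blow the budget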
+ + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + config (`str`): + A configuration name. + split (`str`): + A split name. + assets_base_url (`str`): + The base url of the assets. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str` or `None`): + An authentication token (See https://huggingface.co/settings/token) + max_size_fallback (`int`): + The maximum number of bytes of the split to fallback to normal mode if the streaming mode fails. + rows_max_bytes (`int`): + The maximum number of bytes of the response (else, the response is truncated). + rows_max_number (`int`): + The maximum number of rows of the response. + rows_min_number (`int`): + The minimum number of rows of the response. + Returns: + [`FirstRowsResponse`]: The list of first rows of the split. + <Tip> + Raises the following errors: + - [`~libcommon.worker.ConfigNotFoundError`] + If the config does not exist in the dataset. + - [`~libcommon.worker.SplitNotFoundError`] + If the split does not exist in the dataset. + - [`~worker.utils.InfoError`] + If the config info could not be obtained using the datasets library. + - [`~worker.utils.FeaturesError`] + If the split features could not be obtained using the datasets library. + - [`~worker.utils.StreamingRowsError`] + If the split rows could not be obtained using the datasets library in streaming mode. + - [`~worker.utils.NormalRowsError`] + If the split rows could not be obtained using the datasets library in normal mode. + - [`~worker.utils.RowsPostProcessingError`] + If the post-processing of the split rows failed, e.g. while saving the images or audio files to the assets. + </Tip> + """ + logging.info(f"get first-rows for dataset={dataset} config={config} split={split}") + use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False + # first ensure the tuple (dataset, config, split) exists on the Hub + # get the list of splits + try: + split_full_names = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) + except _EmptyDatasetError as err: + raise EmptyDatasetError("The dataset is empty.", cause=err) from err + except Exception as err: + raise SplitsNamesError("Cannot get the split names for the dataset.", cause=err) from err + # ^ can raise DatasetNotFoundError or SplitsNamesError + if config not in [split_full_name["config"] for split_full_name in split_full_names]: + raise ConfigNotFoundError(f"config {config} does not exist for dataset {dataset}") + if {"dataset": dataset, "config": config, "split": split} not in [ + { + "dataset": split_full_name["dataset"], + "config": split_full_name["config"], + "split": split_full_name["split"], + } + for split_full_name in split_full_names + ]: + raise SplitNotFoundError("The config or the split does not exist in the dataset") + # get the features + try: + info = get_dataset_config_info( + path=dataset, + config_name=config, + use_auth_token=use_auth_token, + ) + except Exception as err: + raise InfoError("The info cannot be fetched for the dataset config.", cause=err) from err + if not info.features: + try: + # https://github.com/huggingface/datasets/blob/f5826eff9b06ab10dba1adfa52543341ef1e6009/src/datasets/iterable_dataset.py#L1255 + iterable_dataset = load_dataset( + path=dataset, + name=config, + split=split, + streaming=True, + use_auth_token=use_auth_token, @@ -133 +472,6 @@ class FirstRowsWorker(Worker): - return False + if not isinstance(iterable_dataset, 
IterableDataset): + raise TypeError("load_dataset should return an IterableDataset") + iterable_dataset = iterable_dataset._resolve_features() + if not isinstance(iterable_dataset, IterableDataset): + raise TypeError("load_dataset should return an IterableDataset") + features = iterable_dataset.features @@ -135,3 +479,21 @@ class FirstRowsWorker(Worker): - e = UnexpectedError(str(err), err) - upsert_response( - kind=CacheKind.FIRST_ROWS.value, + raise FeaturesError("The split features (columns) cannot be extracted.", cause=err) from err + else: + features = info.features + # get the rows + try: + rows = get_rows( + dataset=dataset, + config=config, + split=split, + streaming=True, + rows_max_number=rows_max_number, + use_auth_token=use_auth_token, + ) + except Exception as err: + if info.size_in_bytes is None or info.size_in_bytes > max_size_fallback: + raise StreamingRowsError( + "Cannot load the dataset split (in streaming mode) to extract the first rows.", + cause=err, + ) from err + try: + rows = get_rows( @@ -141,8 +503,3 @@ class FirstRowsWorker(Worker): - content=dict(e.as_response()), - http_status=e.status_code, - error_code=e.code, - details=dict(e.as_response_with_cause()), - ) - logging.debug( - f"first-rows response for dataset={dataset} config={config} split={split} had a server" - " error, cache updated" + streaming=False, + rows_max_number=rows_max_number, + use_auth_token=use_auth_token, @@ -150 +507,81 @@ class FirstRowsWorker(Worker): - return False + except Exception as err: + raise NormalRowsError( + "Cannot load the dataset split (in normal download mode) to extract the first rows.", + cause=err, + ) from err + # transform the rows, if needed (e.g. save the images or audio to the assets, and return their URL) + try: + transformed_rows = transform_rows( + dataset=dataset, + config=config, + split=split, + rows=rows, + features=features, + assets_base_url=assets_base_url, + assets_directory=assets_directory, + ) + except Exception as err: + raise RowsPostProcessingError( + "Server error while post-processing the split rows. 
Please report the issue.", + cause=err, + ) from err + # get the size of the surrounding JSON (without the rows) + features_list = to_features_list(features=features) + response: FirstRowsResponse = { + "dataset": dataset, + "config": config, + "split": split, + "features": features_list, + "rows": [], + } + surrounding_json_size = get_json_size(response) + # truncate the rows to fit within the restrictions, and prepare them as RowItems + row_items = create_truncated_row_items( + rows=transformed_rows, + min_cell_bytes=min_cell_bytes, + rows_max_bytes=rows_max_bytes - surrounding_json_size, + rows_min_number=rows_min_number, + ) + response["rows"] = row_items + # return the response + return response + + +class FirstRowsWorker(Worker): + cache_config: CacheConfig + first_rows_config: FirstRowsConfig + + def __init__(self, app_config: AppConfig, endpoint: str): + super().__init__( + processing_step=app_config.processing_graph.graph.get_step(endpoint), + # ^ raises if the step is not found + common_config=app_config.common, + queue_config=app_config.queue, + worker_config=app_config.worker, + version=importlib.metadata.version(__package__), + ) + self.cache_config = app_config.cache + self.first_rows_config = app_config.first_rows + + def compute( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + force: bool = False, + ) -> Mapping[str, Any]: + if config is None or split is None: + raise ValueError("config and split are required") + return compute_first_rows_response( + dataset=dataset, + config=config, + split=split, + assets_base_url=self.common_config.assets_base_url, + hf_token=self.common_config.hf_token, + min_cell_bytes=self.first_rows_config.min_cell_bytes, + max_size_fallback=self.first_rows_config.fallback_max_dataset_size, + rows_max_bytes=self.first_rows_config.max_bytes, + rows_max_number=self.first_rows_config.max_number, + rows_min_number=self.first_rows_config.min_number, + assets_directory=self.cache_config.assets_directory, + ) diff --git a/workers/first_rows/tests/conftest.py b/workers/first_rows/tests/conftest.py index 70c7ab83..6014ab77 100644 --- a/workers/first_rows/tests/conftest.py +++ b/workers/first_rows/tests/conftest.py @@ -6 +6 @@ from pytest import MonkeyPatch, fixture -from first_rows.config import WorkerConfig +from first_rows.config import AppConfig @@ -28,3 +28,3 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): -def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: - worker_config = WorkerConfig() - if "test" not in worker_config.cache.mongo_database or "test" not in worker_config.queue.mongo_database: +def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: + app_config = AppConfig() + if "test" not in app_config.cache.mongo_database or "test" not in app_config.queue.mongo_database: @@ -32 +32 @@ def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: - return worker_config + return app_config diff --git a/workers/first_rows/tests/fixtures/datasets.py b/workers/first_rows/tests/fixtures/datasets.py index c1bc51f9..fde51675 100644 --- a/workers/first_rows/tests/fixtures/datasets.py +++ b/workers/first_rows/tests/fixtures/datasets.py @@ -6 +6 @@ from pathlib import Path -from typing import Any, Dict +from typing import Any, Mapping @@ -42 +42 @@ def other(content: Any, feature_type: FeatureType = None) -> Dataset: -def datasets() -> Dict[str, Dataset]: +def datasets() -> Mapping[str, Dataset]: diff --git a/workers/first_rows/tests/fixtures/hub.py 
b/workers/first_rows/tests/fixtures/hub.py index 2129014d..31489c83 100644 --- a/workers/first_rows/tests/fixtures/hub.py +++ b/workers/first_rows/tests/fixtures/hub.py @@ -9 +9 @@ from pathlib import Path -from typing import Any, Dict, Iterable, List, Optional, TypedDict +from typing import Any, Iterable, List, Mapping, Optional, Tuple, TypedDict @@ -22 +22,6 @@ from huggingface_hub.hf_api import ( -from ..utils import get_default_config_split + +def get_default_config_split(dataset: str) -> Tuple[str, str, str]: + config = dataset.replace("/", "--") + split = "train" + return dataset, config, split + @@ -55 +60 @@ def update_repo_settings( -) -> Dict[str, bool]: +) -> Mapping[str, bool]: @@ -230 +235 @@ def hub_public_jsonl(hf_api: HfApi, hf_token: str, jsonl_path: str) -> Iterable[ -def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -> Iterable[str]: +def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: @@ -238 +243 @@ def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -def hub_public_image(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -> Iterable[str]: +def hub_public_image(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: @@ -246 +251 @@ def hub_public_image(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -def hub_public_images_list(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -> Iterable[str]: +def hub_public_images_list(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: @@ -256 +261 @@ def hub_public_images_list(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dat -def hub_public_big(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -> Iterable[str]: +def hub_public_big(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: @@ -269 +274 @@ class HubDatasetTest(TypedDict): -HubDatasets = Dict[str, HubDatasetTest] +HubDatasets = Mapping[str, HubDatasetTest] @@ -287 +292 @@ def create_splits_response(dataset: str, num_bytes: float = None, num_examples: -def create_first_rows_response(dataset: str, cols: Dict[str, Any], rows: List[Any]): +def create_first_rows_response(dataset: str, cols: Mapping[str, Any], rows: List[Any]): diff --git a/workers/first_rows/tests/test_features.py b/workers/first_rows/tests/test_features.py index 7cf6dd8f..58f46fa1 100644 --- a/workers/first_rows/tests/test_features.py +++ b/workers/first_rows/tests/test_features.py @@ -5 +5 @@ import datetime -from typing import Any, Dict +from typing import Any, Mapping @@ -12 +12 @@ from datasets import Audio, Dataset, Image, Value -from first_rows.config import WorkerConfig +from first_rows.config import AppConfig @@ -55 +55,5 @@ def test_value( - dataset_type: str, output_value: Any, output_dtype: str, datasets: Dict[str, Dataset], worker_config: WorkerConfig + dataset_type: str, + output_value: Any, + output_dtype: str, + datasets: Mapping[str, Dataset], + app_config: AppConfig, @@ -69,2 +73,2 @@ def test_value( - assets_base_url=worker_config.common.assets_base_url, - assets_directory=worker_config.cache.assets_directory, + assets_base_url=app_config.common.assets_base_url, + assets_directory=app_config.cache.assets_directory, @@ -292 +296,5 @@ def test_others( - dataset_type: str, output_value: Any, output_type: Any, datasets: Dict[str, Dataset], worker_config: WorkerConfig + dataset_type: str, + output_value: Any, + output_type: Any, + datasets: Mapping[str, Dataset], + 
app_config: AppConfig, @@ -308,2 +316,2 @@ def test_others( - assets_base_url=worker_config.common.assets_base_url, - assets_directory=worker_config.cache.assets_directory, + assets_base_url=app_config.common.assets_base_url, + assets_directory=app_config.cache.assets_directory, diff --git a/workers/first_rows/tests/test_response.py b/workers/first_rows/tests/test_response.py deleted file mode 100644 index 639d3718..00000000 --- a/workers/first_rows/tests/test_response.py +++ /dev/null @@ -1,132 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import pytest -from datasets.packaged_modules import csv -from libcommon.exceptions import CustomError - -from first_rows.config import WorkerConfig -from first_rows.response import compute_first_rows_response, get_json_size - -from .fixtures.hub import HubDatasets -from .utils import get_default_config_split - - [email protected]( - "name,use_token,error_code,cause", - [ - ("public", False, None, None), - ("audio", False, None, None), - ("image", False, None, None), - ("images_list", False, None, None), - ("jsonl", False, None, None), - ("gated", True, None, None), - ("private", True, None, None), - ("empty", False, "EmptyDatasetError", "EmptyDatasetError"), - ("does_not_exist", False, "DatasetNotFoundError", None), - ("gated", False, "DatasetNotFoundError", None), - ("private", False, "DatasetNotFoundError", None), - ], -) -def test_number_rows( - hub_datasets: HubDatasets, - name: str, - use_token: bool, - error_code: str, - cause: str, - worker_config: WorkerConfig, -) -> None: - # temporary patch to remove the effect of - # https://github.com/huggingface/datasets/issues/4875#issuecomment-1280744233 - # note: it fixes the tests, but it does not fix the bug in the "real world" - if hasattr(csv, "_patched_for_streaming") and csv._patched_for_streaming: # type: ignore - csv._patched_for_streaming = False # type: ignore - - dataset = hub_datasets[name]["name"] - expected_first_rows_response = hub_datasets[name]["first_rows_response"] - dataset, config, split = get_default_config_split(dataset) - if error_code is None: - result = compute_first_rows_response( - dataset=dataset, - config=config, - split=split, - assets_base_url=worker_config.common.assets_base_url, - hf_endpoint=worker_config.common.hf_endpoint, - hf_token=worker_config.common.hf_token if use_token else None, - max_size_fallback=worker_config.first_rows.fallback_max_dataset_size, - rows_max_number=worker_config.first_rows.max_number, - rows_min_number=worker_config.first_rows.min_number, - rows_max_bytes=worker_config.first_rows.max_bytes, - min_cell_bytes=worker_config.first_rows.min_cell_bytes, - assets_directory=worker_config.cache.assets_directory, - ) - assert result["first_rows_response"] == expected_first_rows_response - assert result["dataset_git_revision"] is not None - return - with pytest.raises(CustomError) as exc_info: - compute_first_rows_response( - dataset=dataset, - config=config, - split=split, - assets_base_url=worker_config.common.assets_base_url, - hf_endpoint=worker_config.common.hf_endpoint, - hf_token=worker_config.common.hf_token if use_token else None, - max_size_fallback=worker_config.first_rows.fallback_max_dataset_size, - rows_max_number=worker_config.first_rows.max_number, - rows_min_number=worker_config.first_rows.min_number, - rows_max_bytes=worker_config.first_rows.max_bytes, - min_cell_bytes=worker_config.first_rows.min_cell_bytes, - assets_directory=worker_config.cache.assets_directory, - ) - assert 
exc_info.value.code == error_code - if cause is None: - assert exc_info.value.disclose_cause is False - assert exc_info.value.cause_exception is None - else: - assert exc_info.value.disclose_cause is True - assert exc_info.value.cause_exception == cause - response = exc_info.value.as_response() - assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} - response_dict = dict(response) - # ^ to remove mypy warnings - assert response_dict["cause_exception"] == cause - assert isinstance(response_dict["cause_traceback"], list) - assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" - - [email protected]( - "name,rows_max_bytes,successful_truncation", - [ - # not-truncated public response is 687 bytes - ("public", 10, False), # too small limit, even with truncation - ("public", 1_000, True), # not truncated - # not-truncated big response is 5_885_989 bytes - ("big", 10, False), # too small limit, even with truncation - ("big", 1_000, True), # truncated successfully - ("big", 10_000_000, True), # not truncated - ], -) -def test_truncation( - hub_datasets: HubDatasets, - worker_config: WorkerConfig, - name: str, - rows_max_bytes: int, - successful_truncation: bool, -) -> None: - dataset, config, split = get_default_config_split(hub_datasets[name]["name"]) - response = compute_first_rows_response( - dataset=dataset, - config=config, - split=split, - assets_base_url=worker_config.common.assets_base_url, - hf_endpoint=worker_config.common.hf_endpoint, - hf_token=None, - max_size_fallback=worker_config.first_rows.fallback_max_dataset_size, - rows_max_number=1_000_000, - rows_min_number=10, - rows_max_bytes=rows_max_bytes, - min_cell_bytes=10, - assets_directory=worker_config.cache.assets_directory, - )["first_rows_response"] - print(get_json_size(response)) - assert (get_json_size(response) <= rows_max_bytes) is successful_truncation diff --git a/workers/first_rows/tests/test_worker.py b/workers/first_rows/tests/test_worker.py index 0dd989d5..ab1e68ac 100644 --- a/workers/first_rows/tests/test_worker.py +++ b/workers/first_rows/tests/test_worker.py @@ -8,2 +8,4 @@ import pytest -from libcache.simple_cache import DoesNotExist, _clean_cache_database, get_response -from libqueue.queue import _clean_queue_database +from datasets.packaged_modules import csv +from libcommon.exceptions import CustomError +from libcommon.queue import _clean_queue_database +from libcommon.simple_cache import DoesNotExist, _clean_cache_database, get_response @@ -11,3 +13,6 @@ from libqueue.queue import _clean_queue_database -from first_rows.config import WorkerConfig -from first_rows.utils import CacheKind -from first_rows.worker import FirstRowsWorker +from first_rows.config import AppConfig +from first_rows.worker import ( + FirstRowsWorker, + compute_first_rows_response, + get_json_size, +) @@ -15 +20 @@ from first_rows.worker import FirstRowsWorker -from .utils import get_default_config_split +from .fixtures.hub import HubDatasets, get_default_config_split @@ -25,2 +30,2 @@ def clean_mongo_database() -> None: -def worker(worker_config: WorkerConfig) -> FirstRowsWorker: - return FirstRowsWorker(worker_config) +def worker(app_config: AppConfig) -> FirstRowsWorker: + return FirstRowsWorker(app_config=app_config, endpoint="/first-rows") @@ -33 +38 @@ def should_skip_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: - worker.compute(dataset=dataset, config=config, split=split) + worker.process(dataset=dataset, config=config, split=split) @@ -40,2 
+45,2 @@ def test_compute(worker: FirstRowsWorker, hub_public_csv: str) -> None: - assert worker.compute(dataset=dataset, config=config, split=split) is True - cached_response = get_response(kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split) + assert worker.process(dataset=dataset, config=config, split=split) is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=dataset, config=config, split=split) @@ -58 +63 @@ def test_doesnotexist(worker: FirstRowsWorker) -> None: - assert worker.compute(dataset=dataset, config=config, split=split) is False + assert worker.process(dataset=dataset, config=config, split=split) is False @@ -60 +65 @@ def test_doesnotexist(worker: FirstRowsWorker) -> None: - get_response(kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split) + get_response(kind=worker.processing_step.cache_kind, dataset=dataset, config=config, split=split) @@ -67,0 +73,118 @@ def test_process_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: + + [email protected]( + "name,use_token,error_code,cause", + [ + ("public", False, None, None), + ("audio", False, None, None), + ("image", False, None, None), + ("images_list", False, None, None), + ("jsonl", False, None, None), + ("gated", True, None, None), + ("private", True, None, None), + ("empty", False, "EmptyDatasetError", "EmptyDatasetError"), + # should we really test the following cases? + # The assumption is that the dataset exists and is accessible with the token + ("does_not_exist", False, "SplitsNamesError", "FileNotFoundError"), + ("gated", False, "SplitsNamesError", "FileNotFoundError"), + ("private", False, "SplitsNamesError", "FileNotFoundError"), + ], +) +def test_number_rows( + hub_datasets: HubDatasets, + name: str, + use_token: bool, + error_code: str, + cause: str, + app_config: AppConfig, +) -> None: + # temporary patch to remove the effect of + # https://github.com/huggingface/datasets/issues/4875#issuecomment-1280744233 + # note: it fixes the tests, but it does not fix the bug in the "real world" + if hasattr(csv, "_patched_for_streaming") and csv._patched_for_streaming: # type: ignore + csv._patched_for_streaming = False # type: ignore + + dataset = hub_datasets[name]["name"] + expected_first_rows_response = hub_datasets[name]["first_rows_response"] + dataset, config, split = get_default_config_split(dataset) + if error_code is None: + result = compute_first_rows_response( + dataset=dataset, + config=config, + split=split, + assets_base_url=app_config.common.assets_base_url, + hf_token=app_config.common.hf_token if use_token else None, + max_size_fallback=app_config.first_rows.fallback_max_dataset_size, + rows_max_number=app_config.first_rows.max_number, + rows_min_number=app_config.first_rows.min_number, + rows_max_bytes=app_config.first_rows.max_bytes, + min_cell_bytes=app_config.first_rows.min_cell_bytes, + assets_directory=app_config.cache.assets_directory, + ) + assert result == expected_first_rows_response + return + with pytest.raises(CustomError) as exc_info: + compute_first_rows_response( + dataset=dataset, + config=config, + split=split, + assets_base_url=app_config.common.assets_base_url, + hf_token=app_config.common.hf_token if use_token else None, + max_size_fallback=app_config.first_rows.fallback_max_dataset_size, + rows_max_number=app_config.first_rows.max_number, + rows_min_number=app_config.first_rows.min_number, + rows_max_bytes=app_config.first_rows.max_bytes, + 
min_cell_bytes=app_config.first_rows.min_cell_bytes, + assets_directory=app_config.cache.assets_directory, + ) + assert exc_info.value.code == error_code + if cause is None: + assert exc_info.value.disclose_cause is False + assert exc_info.value.cause_exception is None + else: + assert exc_info.value.disclose_cause is True + assert exc_info.value.cause_exception == cause + response = exc_info.value.as_response() + assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} + response_dict = dict(response) + # ^ to remove mypy warnings + assert response_dict["cause_exception"] == cause + assert isinstance(response_dict["cause_traceback"], list) + assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" + + [email protected]( + "name,rows_max_bytes,successful_truncation", + [ + # not-truncated public response is 687 bytes + ("public", 10, False), # too small limit, even with truncation + ("public", 1_000, True), # not truncated + # not-truncated big response is 5_885_989 bytes + ("big", 10, False), # too small limit, even with truncation + ("big", 1_000, True), # truncated successfully + ("big", 10_000_000, True), # not truncated + ], +) +def test_truncation( + hub_datasets: HubDatasets, + app_config: AppConfig, + name: str, + rows_max_bytes: int, + successful_truncation: bool, +) -> None: + dataset, config, split = get_default_config_split(hub_datasets[name]["name"]) + response = compute_first_rows_response( + dataset=dataset, + config=config, + split=split, + assets_base_url=app_config.common.assets_base_url, + hf_token=None, + max_size_fallback=app_config.first_rows.fallback_max_dataset_size, + rows_max_number=1_000_000, + rows_min_number=10, + rows_max_bytes=rows_max_bytes, + min_cell_bytes=10, + assets_directory=app_config.cache.assets_directory, + ) + print(get_json_size(response)) + assert (get_json_size(response) <= rows_max_bytes) is successful_truncation diff --git a/workers/first_rows/tests/utils.py b/workers/first_rows/tests/utils.py deleted file mode 100644 index 4e3fdff0..00000000 --- a/workers/first_rows/tests/utils.py +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from typing import Tuple - - -def get_default_config_split(dataset: str) -> Tuple[str, str, str]: - config = dataset.replace("/", "--") - split = "train" - return dataset, config, split diff --git a/workers/splits/Dockerfile b/workers/splits/Dockerfile index 455f3800..bdc1edf5 100644 --- a/workers/splits/Dockerfile +++ b/workers/splits/Dockerfile @@ -26,2 +25,0 @@ WORKDIR /src -COPY libs/libcache/dist ./libs/libcache/dist -COPY libs/libqueue/dist ./libs/libqueue/dist diff --git a/workers/splits/README.md b/workers/splits/README.md index 27e20adc..5585339f 100644 --- a/workers/splits/README.md +++ b/workers/splits/README.md @@ -19,8 +18,0 @@ If the Hub is not https://huggingface.co (i.e. if you set the `COMMON_HF_ENDPOIN -### Cache - -See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. - -### Queue - -See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. 
- diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 5e9fd78a..5bf45816 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -770 +770 @@ name = "importlib-metadata" -version = "5.0.0" +version = "5.1.0" @@ -870,19 +869,0 @@ python-versions = ">=3" -[[package]] -name = "libcache" -version = "0.4.3" -description = "Library for the cache in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -environs = ">=9.5.0,<10.0.0" -mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} - -[package.source] -type = "file" -url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" - @@ -899 +880 @@ name = "libcommon" -version = "0.3.3" +version = "0.5.0" @@ -905,0 +887 @@ python-versions = "==3.9.6" +appdirs = ">=1.4.4,<2.0.0" @@ -907,16 +889 @@ environs = ">=9.5.0,<10.0.0" -orjson = ">=3.6.4,<4.0.0" - -[package.source] -type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl" - -[[package]] -name = "libqueue" -version = "0.4.13" -description = "Library for the jobs queue in mongodb" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -environs = ">=9.5.0,<10.0.0" +huggingface-hub = ">=0.11.0,<0.12.0" @@ -925 +892 @@ mongoengine = ">=0.24.1,<0.25.0" -packaging = ">=21.3,<22.0" +orjson = ">=3.6.4,<4.0.0" @@ -931 +898 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl" @@ -2460 +2427 @@ python-versions = "3.9.6" -content-hash = "6e12ec014e04f388b4a2fdd4af6be9075e9ecf7aa38f49ec9c167e2c2d2ed33f" +content-hash = "152c9759656a3e25a2a16751584007e49545a1a227320371fec492a39e29ae59" @@ -3252,2 +3219,2 @@ importlib-metadata = [ - {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"}, - {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"}, + {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, + {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, @@ -3282,3 +3248,0 @@ kss = [ -libcache = [ - {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, -] @@ -3298,4 +3262 @@ libcommon = [ - {file = "libcommon-0.3.3-py3-none-any.whl", hash = "sha256:b56b6e48bb2c92c3dfc619fc25718db930bc356f1e54fb9f8a67ba4597cb499d"}, -] -libqueue = [ - {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, + {file = "libcommon-0.5.0-py3-none-any.whl", hash = "sha256:0267504716992f562382ff5029ace87444fd12793f2393f3800921d384a0fd52"}, diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index ec39e07f..86223966 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -20,3 +20 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } +libcommon = 
{ path = "../../libs/libcommon/dist/libcommon-0.5.0-py3-none-any.whl", develop = false } diff --git a/workers/splits/src/splits/config.py b/workers/splits/src/splits/config.py index 927d48d8..9ee54c7e 100644 --- a/workers/splits/src/splits/config.py +++ b/workers/splits/src/splits/config.py @@ -6,3 +6,7 @@ from datasets.utils.logging import log_levels, set_verbosity -from libcache.config import CacheConfig -from libcommon.config import CommonConfig -from libqueue.config import QueueConfig +from libcommon.config import ( + CacheConfig, + CommonConfig, + ProcessingGraphConfig, + QueueConfig, + WorkerConfig, +) @@ -11 +15 @@ from libqueue.config import QueueConfig -class WorkerConfig: +class AppConfig: @@ -13,0 +18 @@ class WorkerConfig: + processing_graph: ProcessingGraphConfig @@ -14,0 +20 @@ class WorkerConfig: + worker: WorkerConfig @@ -20,0 +27,2 @@ class WorkerConfig: + self.processing_graph = ProcessingGraphConfig() + self.worker = WorkerConfig() diff --git a/workers/splits/src/splits/main.py b/workers/splits/src/splits/main.py index 79527175..f1c40282 100644 --- a/workers/splits/src/splits/main.py +++ b/workers/splits/src/splits/main.py @@ -4 +4 @@ -from splits.config import WorkerConfig +from splits.config import AppConfig @@ -8,2 +8,3 @@ if __name__ == "__main__": - worker_config = WorkerConfig() - SplitsWorker(worker_config).loop() + app_config = AppConfig() + SPLITS_ENDPOINT = "/splits" + SplitsWorker(app_config=app_config, endpoint=SPLITS_ENDPOINT).loop() diff --git a/workers/splits/src/splits/response.py b/workers/splits/src/splits/response.py deleted file mode 100644 index c223ec2c..00000000 --- a/workers/splits/src/splits/response.py +++ /dev/null @@ -1,164 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import logging -from typing import Dict, List, Optional, TypedDict, Union - -from datasets import ( - DatasetInfo, - get_dataset_config_info, - get_dataset_config_names, - get_dataset_split_names, -) -from datasets.data_files import EmptyDatasetError as _EmptyDatasetError -from huggingface_hub.hf_api import HfApi -from huggingface_hub.utils import RepositoryNotFoundError - -from splits.utils import DatasetNotFoundError, EmptyDatasetError, SplitsNamesError - - -class SplitFullName(TypedDict): - dataset: str - config: str - split: str - - -class SplitItem(SplitFullName): - num_bytes: Optional[int] - num_examples: Optional[int] - - -class SplitsResponse(TypedDict): - splits: List[SplitItem] - - -class SplitsResponseResult(TypedDict): - splits_response: SplitsResponse - dataset_git_revision: Optional[str] - - -def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, None] = False) -> List[SplitFullName]: - """Get the list of splits full names (split and config) for a dataset. - - Args: - dataset (str): A dataset name. If the repository is namespaced (a user or an organization), the namespace and - the dataset name are separated with a slash (`/`), for example: `user/dataset`. - use_auth_token (Union[bool, str, None], optional): user token. It allows to retrieve the splits for gated - datasets. Defaults to False (no authentication). - - Returns: - List[SplitFullName]: a list of splits full names: objects with the keys `dataset`, `config` and `split`. They - are sorted alphabetically by configuration (config), but the splits order for a given configuration is - preserved. 
- """ - logging.info(f"get dataset '{dataset}' split full names") - return [ - {"dataset": dataset, "config": config, "split": split} - for config in sorted(get_dataset_config_names(path=dataset, use_auth_token=use_auth_token)) - for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token) - ] - - -def get_dataset_git_revision( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, -) -> Union[str, None]: - """ - Get the git revision of the dataset. - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, *optional*): - An authentication token (See https://huggingface.co/settings/token) - Returns: - `Union[str, None]`: the dataset git revision (sha) if any. - <Tip> - Raises the following errors: - - [`~worker.exceptions.DatasetNotFoundError`] - If the repository to download from cannot be found. This may be because it doesn't exist, - or because it is set to `private` and you do not have access. - </Tip> - """ - try: - dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token) - except RepositoryNotFoundError as err: - raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err - return dataset_info.sha - - -def compute_splits_response( - dataset: str, - hf_endpoint: str, - hf_token: Optional[str] = None, -) -> SplitsResponseResult: - """ - Get the response of /splits for one specific dataset on huggingface.co. - Dataset can be private or gated if you pass an acceptable token. - Args: - dataset (`str`): - A namespace (user or an organization) and a repo name separated - by a `/`. - hf_endpoint (`str`): - The Hub endpoint (for example: "https://huggingface.co") - hf_token (`str`, *optional*): - An authentication token (See https://huggingface.co/settings/token) - Returns: - `SplitsResponseResult`: An object with the splits_response - (list of splits names) and the dataset_git_revision (sha) if any. - <Tip> - Raises the following errors: - - [`~worker.exceptions.DatasetNotFoundError`] - If the repository to download from cannot be found. This may be because it doesn't exist, - or because it is set to `private` and you do not have access. - - [`~worker.exceptions.SplitsNamesError`] - If the list of splits could not be obtained using the datasets library. - </Tip> - """ - logging.info(f"get splits for dataset={dataset}") - use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False - # first try to get the dataset config info. 
It raises if the dataset does not exist or is private - dataset_git_revision = get_dataset_git_revision(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) - # get the list of splits - try: - split_full_names = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) - except _EmptyDatasetError as err: - raise EmptyDatasetError("The dataset is empty.", cause=err) from err - except Exception as err: - raise SplitsNamesError("Cannot get the split names for the dataset.", cause=err) from err - # get the number of bytes and examples for each split - config_info: Dict[str, DatasetInfo] = {} - split_items: List[SplitItem] = [] - for split_full_name in split_full_names: - dataset = split_full_name["dataset"] - config = split_full_name["config"] - split = split_full_name["split"] - try: - if config not in config_info: - config_info[config] = get_dataset_config_info( - path=dataset, - config_name=config, - use_auth_token=use_auth_token, - ) - info = config_info[config] - num_bytes = info.splits[split].num_bytes if info.splits else None - num_examples = info.splits[split].num_examples if info.splits else None - except Exception: - num_bytes = None - num_examples = None - split_items.append( - { - "dataset": dataset, - "config": config, - "split": split, - "num_bytes": num_bytes, - "num_examples": num_examples, - } - ) - return { - "splits_response": {"splits": split_items}, - "dataset_git_revision": dataset_git_revision, - } diff --git a/workers/splits/src/splits/utils.py b/workers/splits/src/splits/utils.py deleted file mode 100644 index 3092856b..00000000 --- a/workers/splits/src/splits/utils.py +++ /dev/null @@ -1,77 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from enum import Enum -from http import HTTPStatus -from typing import Literal, Optional - -from libcommon.exceptions import CustomError -from libqueue.queue import Queue - -WorkerErrorCode = Literal[ - "DatasetNotFoundError", - "EmptyDatasetError", - "SplitsNamesError", - "UnexpectedError", -] - - -class WorkerCustomError(CustomError): - """Base class for exceptions in this module.""" - - def __init__( - self, - message: str, - status_code: HTTPStatus, - code: WorkerErrorCode, - cause: Optional[BaseException] = None, - disclose_cause: bool = False, - ): - super().__init__(message, status_code, str(code), cause, disclose_cause) - - -class DatasetNotFoundError(WorkerCustomError): - """Raised when the dataset does not exist.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.NOT_FOUND, "DatasetNotFoundError", cause, False) - - -class SplitsNamesError(WorkerCustomError): - """Raised when the split names could not be fetched.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "SplitsNamesError", cause, True) - - -class EmptyDatasetError(WorkerCustomError): - """Raised when the dataset has no data.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) - - -class UnexpectedError(WorkerCustomError): - """Raised when the response for the split has not been found.""" - - def __init__(self, message: str, cause: Optional[BaseException] = None): - super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "UnexpectedError", cause, False) - - -class JobType(Enum): - SPLITS = "/splits" - FIRST_ROWS = 
"/first-rows" - - -class Queues: - splits: Queue - first_rows: Queue - - def __init__(self, max_jobs_per_namespace: Optional[int] = None): - self.splits = Queue(type=JobType.SPLITS.value, max_jobs_per_namespace=max_jobs_per_namespace) - self.first_rows = Queue(type=JobType.FIRST_ROWS.value, max_jobs_per_namespace=max_jobs_per_namespace) - - -class CacheKind(Enum): - SPLITS = "/splits" - FIRST_ROWS = "/first-rows" diff --git a/workers/splits/src/splits/worker.py b/workers/splits/src/splits/worker.py index faeaa9c9..ffcc97ff 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/splits/src/splits/worker.py @@ -7 +7 @@ from http import HTTPStatus -from typing import Optional +from typing import Any, Dict, List, Literal, Mapping, Optional, TypedDict, Union @@ -9,16 +9,5 @@ from typing import Optional -from libcache.simple_cache import ( - delete_response, - get_dataset_response_ids, - get_response_without_content, - upsert_response, -) -from libqueue.worker import Worker - -from splits.config import WorkerConfig -from splits.response import compute_splits_response, get_dataset_git_revision -from splits.utils import ( - CacheKind, - DatasetNotFoundError, - Queues, - UnexpectedError, - WorkerCustomError, +from datasets import ( + DatasetInfo, + get_dataset_config_info, + get_dataset_config_names, + get_dataset_split_names, @@ -25,0 +15,4 @@ from splits.utils import ( +from datasets.data_files import EmptyDatasetError as _EmptyDatasetError +from libcommon.exceptions import CustomError +from libcommon.simple_cache import delete_response, get_dataset_response_ids +from libcommon.worker import Queue, Worker @@ -26,0 +20 @@ from splits.utils import ( +from splits.config import AppConfig @@ -28,35 +22,118 @@ from splits.utils import ( -class SplitsWorker(Worker): - config: WorkerConfig - - def __init__(self, worker_config: WorkerConfig): - super().__init__(queue_config=worker_config.queue, version=importlib.metadata.version(__package__)) - self._queues = Queues(max_jobs_per_namespace=worker_config.queue.max_jobs_per_namespace) - self.config = worker_config - - @property - def queue(self): - return self._queues.splits - - def should_skip_job( - self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False - ) -> bool: - """Return True if the job should be skipped, False otherwise. - - The job must be skipped if: - - force is False - - and a cache entry exists for the dataset - - and the result was successful - - and it has been created with the same major version of the worker - - and it has been created with the exact same git commit of the dataset repository - - Args: - dataset (:obj:`str`): The name of the dataset. - config (:obj:`str`, `optional`): The name of the configuration. - split (:obj:`str`, `optional`): The name of the split. - force (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to force the job to be run. - - Returns: - :obj:`bool`: True if the job should be skipped, False otherwise. 
- """ - if force: - return False +SplitsWorkerErrorCode = Literal[ + "EmptyDatasetError", + "SplitsNamesError", +] + + +class SplitWorkerError(CustomError): + """Base class for worker exceptions.""" + + def __init__( + self, + message: str, + status_code: HTTPStatus, + code: SplitsWorkerErrorCode, + cause: Optional[BaseException] = None, + disclose_cause: bool = False, + ): + super().__init__( + message=message, status_code=status_code, code=str(code), cause=cause, disclose_cause=disclose_cause + ) + + +class SplitsNamesError(SplitWorkerError): + """Raised when the split names could not be fetched.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "SplitsNamesError", cause, True) + + +class EmptyDatasetError(SplitWorkerError): + """Raised when the dataset has no data.""" + + def __init__(self, message: str, cause: Optional[BaseException] = None): + super().__init__(message, HTTPStatus.INTERNAL_SERVER_ERROR, "EmptyDatasetError", cause, True) + + +class SplitFullName(TypedDict): + dataset: str + config: str + split: str + + +class SplitItem(SplitFullName): + num_bytes: Optional[int] + num_examples: Optional[int] + + +class SplitsResponseContent(TypedDict): + splits: List[SplitItem] + + +def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, None] = False) -> List[SplitFullName]: + """Get the list of splits full names (split and config) for a dataset. + + Args: + dataset (str): A dataset name. If the repository is namespaced (a user or an organization), the namespace and + the dataset name are separated with a slash (`/`), for example: `user/dataset`. + use_auth_token (Union[bool, str, None], optional): user token. It allows to retrieve the splits for gated + datasets. Defaults to False (no authentication). + + Returns: + List[SplitFullName]: a list of splits full names: objects with the keys `dataset`, `config` and `split`. They + are sorted alphabetically by configuration (config), but the splits order for a given configuration is + preserved. + """ + logging.info(f"get dataset '{dataset}' split full names") + return [ + {"dataset": dataset, "config": config, "split": split} + for config in sorted(get_dataset_config_names(path=dataset, use_auth_token=use_auth_token)) + for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token) + ] + + +def compute_splits_response( + dataset: str, + hf_token: Optional[str] = None, +) -> SplitsResponseContent: + """ + Get the response of /splits for one specific dataset on huggingface.co. + Dataset can be private or gated if you pass an acceptable token. + + It is assumed that the dataset exist and can be accessed using the token. + + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `SplitsResponseResult`: An object with the splits_response + (list of splits names) and the dataset_git_revision (sha) if any. + <Tip> + Raises the following errors: + - [`~splits.worker.EmptyDatasetError`] + The dataset is empty. + - [`~splits.worker.SplitsNamesError`] + If the list of splits could not be obtained using the datasets library. 
+ </Tip> + """ + logging.info(f"get splits for dataset={dataset}") + use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False + # get the list of splits + try: + split_full_names = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) + except _EmptyDatasetError as err: + raise EmptyDatasetError("The dataset is empty.", cause=err) from err + except Exception as err: + raise SplitsNamesError("Cannot get the split names for the dataset.", cause=err) from err + # get the number of bytes and examples for each split + config_info: Dict[str, DatasetInfo] = {} + split_items: List[SplitItem] = [] + for split_full_name in split_full_names: + dataset = split_full_name["dataset"] + config = split_full_name["config"] + split = split_full_name["split"] @@ -64,12 +141,9 @@ class SplitsWorker(Worker): - cached_response = get_response_without_content(kind=CacheKind.SPLITS.value, dataset=dataset) - dataset_git_revision = get_dataset_git_revision( - dataset=dataset, hf_endpoint=self.config.common.hf_endpoint, hf_token=self.config.common.hf_token - ) - return ( - # TODO: use "error_code" to decide if the job should be skipped (ex: retry if temporary error) - cached_response["http_status"] == HTTPStatus.OK - and cached_response["worker_version"] is not None - and self.compare_major_version(cached_response["worker_version"]) == 0 - and cached_response["dataset_git_revision"] is not None - and cached_response["dataset_git_revision"] == dataset_git_revision - ) + if config not in config_info: + config_info[config] = get_dataset_config_info( + path=dataset, + config_name=config, + use_auth_token=use_auth_token, + ) + info = config_info[config] + num_bytes = info.splits[split].num_bytes if info.splits else None + num_examples = info.splits[split].num_examples if info.splits else None @@ -77 +151,24 @@ class SplitsWorker(Worker): - return False + num_bytes = None + num_examples = None + split_items.append( + { + "dataset": dataset, + "config": config, + "split": split, + "num_bytes": num_bytes, + "num_examples": num_examples, + } + ) + return {"splits": split_items} + + +class SplitsWorker(Worker): + def __init__(self, app_config: AppConfig, endpoint: str): + super().__init__( + processing_step=app_config.processing_graph.graph.get_step(endpoint), + # ^ raises if the step is not found + common_config=app_config.common, + queue_config=app_config.queue, + worker_config=app_config.worker, + version=importlib.metadata.version(__package__), + ) @@ -85,62 +182,28 @@ class SplitsWorker(Worker): - ) -> bool: - try: - splits_response_result = compute_splits_response( - dataset=dataset, hf_endpoint=self.config.common.hf_endpoint, hf_token=self.config.common.hf_token - ) - content = splits_response_result["splits_response"] - upsert_response( - kind=CacheKind.SPLITS.value, - dataset=dataset, - content=dict(content), - http_status=HTTPStatus.OK, - worker_version=self.version, - dataset_git_revision=splits_response_result["dataset_git_revision"], - ) - logging.debug(f"dataset={dataset} is valid, cache updated") - - new_splits = [(s["dataset"], s["config"], s["split"]) for s in content["splits"]] - # remove obsolete first-rows responses from the cache - first_rows_responses_in_cache = [ - (s["dataset"], s["config"], s["split"]) - for s in get_dataset_response_ids(dataset=dataset) - if s["kind"] == CacheKind.FIRST_ROWS.value - ] - first_rows_responses_to_delete = [s for s in first_rows_responses_in_cache if s not in new_splits] - for d, c, s in first_rows_responses_to_delete: - 
delete_response(kind=CacheKind.FIRST_ROWS.value, dataset=d, config=c, split=s) - logging.debug( - f"{len(first_rows_responses_to_delete)} 'first-rows' responses deleted from the cache for obsolete" - f" splits of dataset={dataset}" - ) - # compute the 'first-rows' responses for the new splits - for d, c, s in new_splits: - # we force the refresh of the /first_rows responses if the /splits refresh was forced - self._queues.first_rows.add_job(dataset=d, config=c, split=s, force=force) - logging.debug(f"{len(new_splits)} 'first-rows' jobs added for the splits of dataset={dataset}") - return True - except DatasetNotFoundError: - logging.debug(f"the dataset={dataset} could not be found, don't update the cache") - return False - except WorkerCustomError as err: - upsert_response( - kind=CacheKind.SPLITS.value, - dataset=dataset, - content=dict(err.as_response()), - http_status=err.status_code, - error_code=err.code, - details=dict(err.as_response_with_cause()), - ) - logging.debug(f"splits response for dataset={dataset} had an error, cache updated") - return False - except Exception as err: - e = UnexpectedError(str(err), err) - upsert_response( - kind=CacheKind.SPLITS.value, - dataset=dataset, - content=dict(e.as_response()), - http_status=e.status_code, - error_code=e.code, - details=dict(e.as_response_with_cause()), - ) - logging.debug(f"splits response for dataset={dataset} had a server error, cache updated") - return False + ) -> Mapping[str, Any]: + content = compute_splits_response(dataset=dataset, hf_token=self.common_config.hf_token) + + new_splits = [(s["dataset"], s["config"], s["split"]) for s in content["splits"]] + for step in self.processing_step.children: + if step.input_type == "dataset": + Queue(type=step.job_type).add_job(dataset=dataset, config=config, split=split, force=force) + else: + # remove obsolete responses from the cache + responses_in_cache = [ + (s["dataset"], s["config"], s["split"]) + for s in get_dataset_response_ids(dataset=dataset) + if s["kind"] == step.cache_kind + ] + responses_to_delete = [s for s in responses_in_cache if s not in new_splits] + for d, c, s in responses_to_delete: + delete_response(kind=step.cache_kind, dataset=d, config=c, split=s) + logging.debug( + f"{len(responses_to_delete)} {step.endpoint} responses deleted from the cache for obsolete" + f" splits of dataset={dataset}" + ) + # compute the responses for the new splits + for d, c, s in new_splits: + # we force the refresh of the /first_rows responses if the /splits refresh was forced + Queue(type=step.job_type).add_job(dataset=d, config=c, split=s, force=force) + logging.debug(f"{len(new_splits)} {step.endpoint} jobs added for the splits of dataset={dataset}") + + return content diff --git a/workers/splits/tests/conftest.py b/workers/splits/tests/conftest.py index a8ebe275..827efefe 100644 --- a/workers/splits/tests/conftest.py +++ b/workers/splits/tests/conftest.py @@ -6 +6 @@ from pytest import MonkeyPatch, fixture -from splits.config import WorkerConfig +from splits.config import AppConfig @@ -25,3 +25,3 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): -def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: - worker_config = WorkerConfig() - if "test" not in worker_config.cache.mongo_database or "test" not in worker_config.queue.mongo_database: +def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: + app_config = AppConfig() + if "test" not in app_config.cache.mongo_database or "test" not in app_config.queue.mongo_database: @@ -29 +29 @@ def 
worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: - return worker_config + return app_config diff --git a/workers/splits/tests/fixtures/datasets.py b/workers/splits/tests/fixtures/datasets.py index ab1caf1b..e397c2ec 100644 --- a/workers/splits/tests/fixtures/datasets.py +++ b/workers/splits/tests/fixtures/datasets.py @@ -4 +4 @@ -from typing import Any, Dict +from typing import Any, Mapping @@ -20 +20 @@ def other(content: Any, feature_type: FeatureType = None) -> Dataset: -def datasets() -> Dict[str, Dataset]: +def datasets() -> Mapping[str, Dataset]: diff --git a/workers/splits/tests/fixtures/hub.py b/workers/splits/tests/fixtures/hub.py index 0bfe2485..b9032510 100644 --- a/workers/splits/tests/fixtures/hub.py +++ b/workers/splits/tests/fixtures/hub.py @@ -9 +9 @@ from pathlib import Path -from typing import Any, Dict, Iterable, List, Optional, TypedDict +from typing import Any, Iterable, List, Mapping, Optional, Tuple, TypedDict @@ -22 +22,6 @@ from huggingface_hub.hf_api import ( -from ..utils import get_default_config_split + +def get_default_config_split(dataset: str) -> Tuple[str, str, str]: + config = dataset.replace("/", "--") + split = "train" + return dataset, config, split + @@ -55 +60 @@ def update_repo_settings( -) -> Dict[str, bool]: +) -> Mapping[str, bool]: @@ -222 +227 @@ def hub_gated_csv(hf_api: HfApi, hf_token: str, csv_path: str) -> Iterable[str]: -def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -> Iterable[str]: +def hub_public_audio(hf_api: HfApi, hf_token: str, datasets: Mapping[str, Dataset]) -> Iterable[str]: @@ -234 +239 @@ class HubDatasetTest(TypedDict): -HubDatasets = Dict[str, HubDatasetTest] +HubDatasets = Mapping[str, HubDatasetTest] diff --git a/workers/splits/tests/test_response.py b/workers/splits/tests/test_response.py deleted file mode 100644 index 3ab5c8ac..00000000 --- a/workers/splits/tests/test_response.py +++ /dev/null @@ -1,60 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -import pytest -from libcommon.exceptions import CustomError - -from splits.config import WorkerConfig -from splits.response import compute_splits_response - -from .fixtures.hub import HubDatasets - - [email protected]( - "name,use_token,error_code,cause", - [ - ("public", False, None, None), - ("audio", False, None, None), - ("gated", True, None, None), - ("private", True, None, None), - ("empty", False, "EmptyDatasetError", "EmptyDatasetError"), - ("does_not_exist", False, "DatasetNotFoundError", None), - ("gated", False, "DatasetNotFoundError", None), - ("private", False, "DatasetNotFoundError", None), - ], -) -def test_compute_splits_response_simple_csv( - hub_datasets: HubDatasets, name: str, use_token: bool, error_code: str, cause: str, worker_config: WorkerConfig -) -> None: - dataset = hub_datasets[name]["name"] - expected_splits_response = hub_datasets[name]["splits_response"] - if error_code is None: - result = compute_splits_response( - dataset=dataset, - hf_endpoint=worker_config.common.hf_endpoint, - hf_token=worker_config.common.hf_token if use_token else None, - ) - assert result["splits_response"] == expected_splits_response - assert result["dataset_git_revision"] is not None - return - - with pytest.raises(CustomError) as exc_info: - compute_splits_response( - dataset=dataset, - hf_endpoint=worker_config.common.hf_endpoint, - hf_token=worker_config.common.hf_token if use_token else None, - ) - assert exc_info.value.code == error_code - if cause is None: - assert exc_info.value.disclose_cause is False - assert exc_info.value.cause_exception is None - else: - assert exc_info.value.disclose_cause is True - assert exc_info.value.cause_exception == cause - response = exc_info.value.as_response() - assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} - response_dict = dict(response) - # ^ to remove mypy warnings - assert response_dict["cause_exception"] == cause - assert isinstance(response_dict["cause_traceback"], list) - assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" diff --git a/workers/splits/tests/test_worker.py b/workers/splits/tests/test_worker.py index e1e47474..a8da4127 100644 --- a/workers/splits/tests/test_worker.py +++ b/workers/splits/tests/test_worker.py @@ -7,2 +7,3 @@ import pytest -from libcache.simple_cache import DoesNotExist, _clean_cache_database, get_response -from libqueue.queue import _clean_queue_database +from libcommon.exceptions import CustomError +from libcommon.queue import _clean_queue_database +from libcommon.simple_cache import DoesNotExist, _clean_cache_database, get_response @@ -10,3 +11,4 @@ from libqueue.queue import _clean_queue_database -from splits.config import WorkerConfig -from splits.utils import CacheKind -from splits.worker import SplitsWorker +from splits.config import AppConfig +from splits.worker import SplitsWorker, compute_splits_response + +from .fixtures.hub import HubDatasets @@ -22,2 +24,2 @@ def clean_mongo_database() -> None: -def worker(worker_config: WorkerConfig) -> SplitsWorker: - return SplitsWorker(worker_config) +def worker(app_config: AppConfig) -> SplitsWorker: + return SplitsWorker(app_config=app_config, endpoint="/splits") @@ -36 +38 @@ def should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: - worker.compute(dataset=dataset) + worker.process(dataset=dataset) @@ -41 +43 @@ def should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: -def test_compute(worker: SplitsWorker, hub_public_csv: str) -> None: 
+def test_process(worker: SplitsWorker, hub_public_csv: str) -> None: @@ -43,2 +45,2 @@ def test_compute(worker: SplitsWorker, hub_public_csv: str) -> None: - assert worker.compute(dataset=dataset) is True - cached_response = get_response(kind=CacheKind.SPLITS.value, dataset=hub_public_csv) + assert worker.process(dataset=dataset) is True + cached_response = get_response(kind=worker.processing_step.cache_kind, dataset=hub_public_csv) @@ -58 +60 @@ def test_doesnotexist(worker: SplitsWorker) -> None: - assert worker.compute(dataset=dataset) is False + assert worker.process(dataset=dataset) is False @@ -60 +62 @@ def test_doesnotexist(worker: SplitsWorker) -> None: - get_response(kind=CacheKind.SPLITS.value, dataset=dataset) + get_response(kind=worker.processing_step.cache_kind, dataset=dataset) @@ -66,0 +69,49 @@ def test_process_job(worker: SplitsWorker, hub_public_csv: str) -> None: + + [email protected]( + "name,use_token,error_code,cause", + [ + ("public", False, None, None), + ("audio", False, None, None), + ("gated", True, None, None), + ("private", True, None, None), + ("empty", False, "EmptyDatasetError", "EmptyDatasetError"), + # should we really test the following cases? + # The assumption is that the dataset exists and is accessible with the token + ("does_not_exist", False, "SplitsNamesError", "FileNotFoundError"), + ("gated", False, "SplitsNamesError", "FileNotFoundError"), + ("private", False, "SplitsNamesError", "FileNotFoundError"), + ], +) +def test_compute_splits_response_simple_csv( + hub_datasets: HubDatasets, name: str, use_token: bool, error_code: str, cause: str, app_config: AppConfig +) -> None: + dataset = hub_datasets[name]["name"] + expected_splits_response = hub_datasets[name]["splits_response"] + if error_code is None: + result = compute_splits_response( + dataset=dataset, + hf_token=app_config.common.hf_token if use_token else None, + ) + assert result == expected_splits_response + return + + with pytest.raises(CustomError) as exc_info: + compute_splits_response( + dataset=dataset, + hf_token=app_config.common.hf_token if use_token else None, + ) + assert exc_info.value.code == error_code + if cause is None: + assert exc_info.value.disclose_cause is False + assert exc_info.value.cause_exception is None + else: + assert exc_info.value.disclose_cause is True + assert exc_info.value.cause_exception == cause + response = exc_info.value.as_response() + assert set(response.keys()) == {"error", "cause_exception", "cause_message", "cause_traceback"} + response_dict = dict(response) + # ^ to remove mypy warnings + assert response_dict["cause_exception"] == cause + assert isinstance(response_dict["cause_traceback"], list) + assert response_dict["cause_traceback"][0] == "Traceback (most recent call last):\n" diff --git a/workers/splits/tests/utils.py b/workers/splits/tests/utils.py deleted file mode 100644 index 4e3fdff0..00000000 --- a/workers/splits/tests/utils.py +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from typing import Tuple - - -def get_default_config_split(dataset: str) -> Tuple[str, str, str]: - config = dataset.replace("/", "--") - split = "train" - return dataset, config, split
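The first-rows worker in the commit above budgets its JSON response in two steps: it measures the envelope (dataset, config, split, features, empty rows list) with get_json_size, then truncates the rows to the remaining byte budget. Below is a minimal sketch of that budgeting, assuming orjson (a declared libcommon dependency in the lock files above); truncate_rows_to_budget is a hypothetical, simplified stand-in for create_truncated_row_items, which in the real code also truncates individual cells down to min_cell_bytes.

import orjson


def get_json_size(obj) -> int:
    # size in bytes of the JSON serialization, used as the budget metric
    return len(orjson.dumps(obj))


def truncate_rows_to_budget(rows, rows_max_bytes, rows_min_number):
    # keep at least rows_min_number rows, then keep adding rows while the
    # serialized size stays under the byte budget
    items, size = [], get_json_size([])
    for i, row in enumerate(rows):
        row_size = get_json_size(row) + 1  # +1 for the separating comma
        if i >= rows_min_number and size + row_size > rows_max_bytes:
            break
        items.append(row)
        size += row_size
    return items


# mirror the worker: reserve the envelope size before budgeting the rows
response = {"dataset": "user/dataset", "config": "user--dataset", "split": "train", "features": [], "rows": []}
budget = 1_000 - get_json_size(response)
response["rows"] = truncate_rows_to_budget([{"col": i} for i in range(100)], budget, rows_min_number=10)

Note that, as in the test_truncation cases above, the minimum row count wins over the byte budget, so a too-small rows_max_bytes can still yield a response larger than the limit.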
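The rewritten SplitsWorker.compute in the same commit also fans the work out: for each child step it deletes cached responses for splits that no longer exist, then enqueues one job per current split. A minimal sketch of that diff-and-delete pattern, with hypothetical delete_response and add_job callables standing in for libcommon.simple_cache.delete_response and Queue.add_job:

def refresh_children(new_splits, cached_keys, delete_response, add_job, force=False):
    # drop cache entries for (dataset, config, split) keys that disappeared
    obsolete = [key for key in cached_keys if key not in new_splits]
    for dataset, config, split in obsolete:
        delete_response(dataset=dataset, config=config, split=split)
    # enqueue a refresh job for every split that currently exists
    for dataset, config, split in new_splits:
        add_job(dataset=dataset, config=config, split=split, force=force)


# usage with in-memory stand-ins for the cache and the queue
cache = {("ds", "cfg", "train"), ("ds", "cfg", "removed")}
jobs = []
refresh_children(
    new_splits=[("ds", "cfg", "train"), ("ds", "cfg", "test")],
    cached_keys=list(cache),
    delete_response=lambda dataset, config, split: cache.discard((dataset, config, split)),
    add_job=lambda dataset, config, split, force: jobs.append((dataset, config, split)),
)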
3012da62054a025467616abc14b0b46e1f11ea13
Sylvain Lesage
2022-11-24T12:48:09
fix: 🐛 install missing dependency (#647)
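The missing dependency fixed in the diff below is pymongo's "srv" extra: libcache and libqueue now pin pymongo[srv], pulling in dnspython, presumably so that mongodb+srv:// connection strings can be resolved. A minimal sketch of the failure mode the pin avoids (the URI is a placeholder):

from pymongo import MongoClient
from pymongo.errors import ConfigurationError

try:
    # resolving a mongodb+srv:// URI requires an SRV DNS lookup, which
    # pymongo delegates to dnspython; without the "srv" extra, MongoClient
    # raises ConfigurationError at construction time
    client = MongoClient("mongodb+srv://cluster0.example.mongodb.net/test")
except ConfigurationError as err:
    print(f"install pymongo[srv] to get dnspython: {err}")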
diff --git a/.github/workflows/_quality-python.yml b/.github/workflows/_quality-python.yml
index 1ff16534..c1e8be4c 100644
--- a/.github/workflows/_quality-python.yml
+++ b/.github/workflows/_quality-python.yml
@@ -57 +57 @@ jobs:
- run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')"
+ run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')"
@@ -59,2 +59,2 @@ jobs:
- if: ${{ inputs.is-datasets-worker == false }}
- run: bash -c 'poetry run pip-audit -r <(poetry export -f requirements.txt --with dev)'
+ if: ${{ inputs.is-datasets-worker == false && inputs.is-library-with-pymongo == false }}
+ run: bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d')"
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml
index 5b47d01f..ae5c52de 100644
--- a/chart/docker-images.yaml
+++ b/chart/docker-images.yaml
@@ -5 +5 @@
- "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-b6d4c8a"
+ "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-2d81b2f"
@@ -8,2 +8,2 @@
- "admin": "huggingface/datasets-server-services-admin:sha-b6d4c8a",
- "api": "huggingface/datasets-server-services-api:sha-b6d4c8a"
+ "admin": "huggingface/datasets-server-services-admin:sha-2d81b2f",
+ "api": "huggingface/datasets-server-services-api:sha-2d81b2f"
@@ -12,2 +12,2 @@
- "splits": "huggingface/datasets-server-workers-splits:sha-2bb27cb",
- "firstRows": "huggingface/datasets-server-workers-first_rows:sha-2bb27cb"
+ "splits": "huggingface/datasets-server-workers-splits:sha-2d81b2f",
+ "firstRows": "huggingface/datasets-server-workers-first_rows:sha-2d81b2f"
diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock
index 722ae292..c2ba669a 100644
--- a/jobs/mongodb_migration/poetry.lock
+++ b/jobs/mongodb_migration/poetry.lock
@@ -157 +157 @@ name = "dnspython"
-version = "2.2.1"
+version = "1.16.0"
@@ -161 +161 @@ optional = false
-python-versions = ">=3.6,<4.0"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
@@ -164,6 +164,2 @@ python-versions = ">=3.6,<4.0"
-curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"]
-dnssec = ["cryptography (>=2.6,<37.0)"]
-doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"]
-idna = ["idna (>=2.1,<4.0)"]
-trio = ["trio (>=0.14,<0.20)"]
-wmi = ["wmi (>=1.5.1,<2.0.0)"]
+dnssec = ["ecdsa (>=0.13)", "pycryptodome"]
+idna = ["idna (>=2.1)"]
@@ -285 +281 @@ name = "libcache"
-version = "0.4.1"
+version = "0.4.3"
@@ -295,0 +292 @@ mongoengine = ">=0.24.1,<0.25.0"
+pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]}
@@ -299 +296 @@ type = "file"
-url = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl"
+url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl"
@@ -319 +316 @@ name = "libqueue"
-version = "0.4.11"
+version = "0.4.13"
@@ -330,0 +328 @@ psutil = ">=5.9.2,<6.0.0"
+pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]}
@@ -334 +332 @@ type = "file"
-url = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl"
+url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl"
@@ -600 +598 @@ name = "pymongo"
-version = "4.3.3"
+version = "3.13.0"
@@ -604 +602 @@ optional = false
-python-versions = ">=3.7"
+python-versions = "*"
@@ -607 +605 @@ python-versions = ">=3.7"
-dnspython = ">=1.16.0,<3.0.0"
+dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""}
@@ -611 +609 @@ aws = ["pymongo-auth-aws (<2.0.0)"]
-encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"]
+encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"]
@@ -614,0 +613,2 @@ snappy = ["python-snappy"]
+srv = ["dnspython (>=1.16.0,<1.17.0)"]
+tls = ["ipaddress"]
@@ -842 +842 @@ python-versions = "3.9.6"
-content-hash = "612fa99fab87c170611a93581bb85dc6627d2e0c17a3ef904b0b0b0129f107f3"
+content-hash = "d4fecce5ce9d0f7f3639f95e30c76cd8e16be91f46121d8939916da0d6748219"
@@ -961,2 +961,2 @@ dnspython = [
- {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"},
- {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"},
+ {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"},
+ {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"},
@@ -1001 +1001 @@ libcache = [
- {file = "libcache-0.4.1-py3-none-any.whl", hash = "sha256:cbd2dc8050d96a933fd5806d1153b7d3deee828b80a47ddb33782f712ecf6483"},
+ {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"},
@@ -1007 +1007 @@ libqueue = [
- {file = "libqueue-0.4.11-py3-none-any.whl", hash = "sha256:4bc6f021571e4f1b2bd0e3062677d20809335efd6715532f6099ba76a3c3a8a1"},
+ {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"},
@@ -1235,74 +1235,109 @@ pymongo = [
- {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"},
- {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"},
- {file =
"pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"}, - {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"}, - {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"}, - {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"}, - {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"}, - {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"}, - {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"}, - {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"}, - {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"}, - {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"}, - {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"}, - {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"}, - {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"}, - {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"}, - {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"}, - {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"}, + {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, + {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, + {file = 
"pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, + {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, + {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, + {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, + {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, + {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, + {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, + {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, + {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, + {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, + {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, + {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, + {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, + {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, + {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, + {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, + {file = 
"pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, + {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, + {file = "pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, + {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = 
"sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, + {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, + {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, + {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index bb2b7b58..c220cbac 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } @@ -12 +12 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } diff --git a/libs/libcache/dist/libcache-0.4.2-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.2-py3-none-any.whl new file mode 100644 index 00000000..76507edd Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.2-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.4.2.tar.gz b/libs/libcache/dist/libcache-0.4.2.tar.gz new file mode 100644 index 00000000..e9def6cc Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.2.tar.gz differ diff --git a/libs/libcache/dist/libcache-0.4.3-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.3-py3-none-any.whl new file mode 100644 index 00000000..fbf5ec20 Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.3-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.4.3.tar.gz b/libs/libcache/dist/libcache-0.4.3.tar.gz new file mode 100644 index 00000000..d8c4c5da Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.3.tar.gz differ diff --git a/libs/libcache/poetry.lock b/libs/libcache/poetry.lock index 
183e9bce..ac577c4a 100644 --- a/libs/libcache/poetry.lock +++ b/libs/libcache/poetry.lock @@ -157 +157 @@ name = "dnspython" -version = "2.2.1" +version = "1.16.0" @@ -161 +161 @@ optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -164,6 +164,2 @@ python-versions = ">=3.6,<4.0" -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<37.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.20)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +dnssec = ["ecdsa (>=0.13)", "pycryptodome"] +idna = ["idna (>=2.1)"] @@ -215 +211 @@ name = "gitdb" -version = "4.0.9" +version = "4.0.10" @@ -219 +215 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -528 +524 @@ name = "pymongo" -version = "4.3.3" +version = "3.13.0" @@ -532 +528 @@ optional = false -python-versions = ">=3.7" +python-versions = "*" @@ -535 +531 @@ python-versions = ">=3.7" -dnspython = ">=1.16.0,<3.0.0" +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} @@ -539 +535 @@ aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] @@ -542,0 +539,2 @@ snappy = ["python-snappy"] +srv = ["dnspython (>=1.16.0,<1.17.0)"] +tls = ["ipaddress"] @@ -660 +658 @@ name = "setuptools" -version = "65.6.2" +version = "65.6.3" @@ -770 +768 @@ python-versions = "3.9.6" -content-hash = "e1ea64172f9bd00dc66ae222c0ba9d1ab09be703a96e52e3141a9ba7f445251f" +content-hash = "3362e9488ab16ced56bbc6ce4d38fc36bdc7c7cf11ad73818788a373a486cca6" @@ -889,2 +887,2 @@ dnspython = [ - {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, - {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, @@ -905,2 +903,2 @@ gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, @@ -1087,74 +1085,109 @@ pymongo = [ - {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = 
"sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"}, - {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"}, - {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"}, - {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"}, - {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"}, - {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"}, - {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"}, - {file = 
"pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"}, - {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"}, - {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"}, - {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"}, - {file = 
"pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"}, - {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"}, - {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"}, - {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"}, - {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"}, - {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"}, - 
{file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"}, + {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, + {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, + {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, + {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, + {file = 
"pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, + {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, + {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, + {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, + {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, + {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, + {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, + {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, + {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, + {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, + {file = 
"pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, + {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, + {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, + {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, + {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, + {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, + {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, + {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, + {file = 
"pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, + {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, + {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, + {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, + {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, @@ -1233,2 +1266,2 @@ setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml index 98b3eaf0..e657b981 100644 --- a/libs/libcache/pyproject.toml +++ 
b/libs/libcache/pyproject.toml @@ -5 +5 @@ name = "libcache" -version = "0.4.1" +version = "0.4.3" @@ -12,0 +13 @@ mongoengine = "^0.24.1" +pymongo = { extras = ["srv"], version = "^3.13.0" } diff --git a/libs/libqueue/dist/libqueue-0.4.12-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.12-py3-none-any.whl new file mode 100644 index 00000000..8f614c62 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.12-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.12.tar.gz b/libs/libqueue/dist/libqueue-0.4.12.tar.gz new file mode 100644 index 00000000..ee8d73cb Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.12.tar.gz differ diff --git a/libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl new file mode 100644 index 00000000..6bee9b1f Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.13.tar.gz b/libs/libqueue/dist/libqueue-0.4.13.tar.gz new file mode 100644 index 00000000..8303ca84 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.13.tar.gz differ diff --git a/libs/libqueue/poetry.lock b/libs/libqueue/poetry.lock index c3ac2e9c..428e4308 100644 --- a/libs/libqueue/poetry.lock +++ b/libs/libqueue/poetry.lock @@ -149 +149 @@ name = "dnspython" -version = "2.2.1" +version = "1.16.0" @@ -153 +153 @@ optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -156,6 +156,2 @@ python-versions = ">=3.6,<4.0" -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<37.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.20)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +dnssec = ["ecdsa (>=0.13)", "pycryptodome"] +idna = ["idna (>=2.1)"] @@ -207 +203 @@ name = "gitdb" -version = "4.0.9" +version = "4.0.10" @@ -211 +207 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -531 +527 @@ name = "pymongo" -version = "4.3.3" +version = "3.13.0" @@ -535 +531 @@ optional = false -python-versions = ">=3.7" +python-versions = "*" @@ -538 +534 @@ python-versions = ">=3.7" -dnspython = ">=1.16.0,<3.0.0" +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} @@ -542 +538 @@ aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] @@ -545,0 +542,2 @@ snappy = ["python-snappy"] +srv = ["dnspython (>=1.16.0,<1.17.0)"] +tls = ["ipaddress"] @@ -663 +661 @@ name = "setuptools" -version = "65.6.2" +version = "65.6.3" @@ -781 +779 @@ python-versions = "3.9.6" -content-hash = "1833f2599887d7d9f2273f2ec02fcb396e4a114919112c539a9159ee7bd2757c" +content-hash = "03e5ffad91f3c5ecb6155f80e69e77d6fc5a77497f47056361ddf900f2236f59" @@ -896,2 +894,2 @@ dnspython = [ - {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, - {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, @@ -912,2 +910,2 @@ gitdb = [ - {file = 
"gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, @@ -1110,74 +1108,109 @@ pymongo = [ - {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"}, - {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"}, - {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"}, - {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"}, - {file = 
"pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"}, - {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"}, - {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"}, - {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"}, - {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"}, - {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"}, - {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"}, - {file = 
"pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"}, - {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"}, - {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"}, - {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"}, - {file = 
"pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"}, - {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"}, - {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"}, - {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"}, + {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, + {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, + {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, + {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = 
"sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, + {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, + {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, + {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, + {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, + {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, + {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, + {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, + {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, + {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, + {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, + {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, + {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, + {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, + {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, + {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, + {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, + {file = "pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, + {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, + {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, + {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, + {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, @@ -1256,2 +1289,2 @@ setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index b59eb896..ee0aa673 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -5 +5 @@ name = "libqueue" -version = "0.4.11" +version = "0.4.13" @@ -13,0 +14 @@ psutil = "^5.9.2" +pymongo = { extras = ["srv"], version = "^3.13.0" } diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index c801c2ac..02911565 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -185 +185 @@ name = "dnspython" -version = "2.2.1" +version = "1.16.0" @@ -189 +189 @@ optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -192,6 +192,2 @@ python-versions = ">=3.6,<4.0" -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<37.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.20)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +dnssec = ["ecdsa (>=0.13)", "pycryptodome"] +idna = ["idna (>=2.1)"] @@ -360 +356 @@ name = "libcache" -version = "0.4.1" +version = "0.4.3" @@ -370,0 +367 @@ mongoengine = ">=0.24.1,<0.25.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -374 +371 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" @@ -394 +391 @@ name = "libqueue" -version = "0.4.11" +version = "0.4.13" @@ -405,0 +403 @@ psutil = ">=5.9.2,<6.0.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -409 +407 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" @@ -686 +684 @@ name = "pymongo" -version = "4.3.3" +version = "3.13.0" @@ -690 +688 @@ optional = false -python-versions = ">=3.7" +python-versions = "*" @@ -693 +691 @@ python-versions = ">=3.7" -dnspython = ">=1.16.0,<3.0.0" +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} @@ -697 +695 @@ aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] @@ -700,0 +699,2 @@ snappy = ["python-snappy"] +srv = ["dnspython 
(>=1.16.0,<1.17.0)"] +tls = ["ipaddress"] @@ -1024 +1024 @@ python-versions = "3.9.6" -content-hash = "cd8bab3ce4ced3023d7200ba1ee4a773478fdc8f2ed08f762f4611f5f6ff0811" +content-hash = "fea2b0d92b950310e73d15ef4a4029a600fd183996ad2c28259a2c846ade8c40" @@ -1151,2 +1151,2 @@ dnspython = [ - {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, - {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, @@ -1203 +1203 @@ libcache = [ - {file = "libcache-0.4.1-py3-none-any.whl", hash = "sha256:cbd2dc8050d96a933fd5806d1153b7d3deee828b80a47ddb33782f712ecf6483"}, + {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, @@ -1209 +1209 @@ libqueue = [ - {file = "libqueue-0.4.11-py3-none-any.whl", hash = "sha256:4bc6f021571e4f1b2bd0e3062677d20809335efd6715532f6099ba76a3c3a8a1"}, + {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, @@ -1441,74 +1441,109 @@ pymongo = [ - {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"}, - {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"}, - {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"}, - {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"}, - {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"}, - {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"}, - {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"}, - {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"}, - {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"}, - {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"}, - {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"}, - {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"}, - {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"}, - {file = 
"pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"}, - {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"}, - {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"}, - {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"}, + {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, + {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, + {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, + {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, + {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, + {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, + {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, + {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, + {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, + {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, + {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, + {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, + {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, + {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, + {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, + {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, + {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, + {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, + {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, + {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, + {file = "pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, + {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, + {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, + {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, + {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 024b0528..7e409622 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -11 +11 @@ huggingface-hub = "^0.11.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 898284b5..39db46d4 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -185 +185 @@ name = "dnspython" -version = "2.2.1" +version = "1.16.0" @@ -189 +189 @@ optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -192,6 +192,2 @@ python-versions = ">=3.6,<4.0" -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<37.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.20)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +dnssec = ["ecdsa (>=0.13)", "pycryptodome"] +idna = ["idna (>=2.1)"] @@ -376 +372 @@ name = "libcache" -version = "0.4.1" +version = "0.4.3" @@ -386,0 +383 @@ mongoengine = ">=0.24.1,<0.25.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -390 +387 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" @@ -410 +407 @@ name = "libqueue" -version = "0.4.11" +version = "0.4.13" @@ -421,0 +419 @@ psutil = ">=5.9.2,<6.0.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -425 +423 @@ type = "file" -url = 
"../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" @@ -710 +708 @@ name = "pymongo" -version = "4.3.3" +version = "3.13.0" @@ -714 +712 @@ optional = false -python-versions = ">=3.7" +python-versions = "*" @@ -717 +715 @@ python-versions = ">=3.7" -dnspython = ">=1.16.0,<3.0.0" +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} @@ -721 +719 @@ aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] @@ -724,0 +723,2 @@ snappy = ["python-snappy"] +srv = ["dnspython (>=1.16.0,<1.17.0)"] +tls = ["ipaddress"] @@ -1066 +1066 @@ python-versions = "3.9.6" -content-hash = "9339471cb871e188a677e1d8619390e0411b119cbcfb64e2bb6d0c9b499a8972" +content-hash = "2a3e653f65a138001dd2d8d68462536ec5f3f40476e9363204e72a041e20d8e4" @@ -1193,2 +1193,2 @@ dnspython = [ - {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, - {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, @@ -1249 +1249 @@ libcache = [ - {file = "libcache-0.4.1-py3-none-any.whl", hash = "sha256:cbd2dc8050d96a933fd5806d1153b7d3deee828b80a47ddb33782f712ecf6483"}, + {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, @@ -1255 +1255 @@ libqueue = [ - {file = "libqueue-0.4.11-py3-none-any.whl", hash = "sha256:4bc6f021571e4f1b2bd0e3062677d20809335efd6715532f6099ba76a3c3a8a1"}, + {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, @@ -1529,74 +1529,109 @@ pymongo = [ - {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"}, - {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"}, - {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"}, - {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"}, - {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"}, - {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"}, - {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"}, - {file = 
"pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"}, - {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"}, - {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"}, - {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"}, - {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = 
"sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"}, - {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"}, - {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"}, - {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"}, - {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"}, - {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"}, + {file = "pymongo-3.13.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:3ad3a3df830f7df7e0856c2bdb54d19f5bf188bd7420985e18643b8e4d2a075f"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b96e0e9d2d48948240b510bac81614458fc10adcd3a93240c2fd96448b4efd35"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f592b202d77923498b32ddc5b376e5fa9ba280d3e16ed56cb8c932fe6d6a478"}, + {file = "pymongo-3.13.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:851f2bb52b5cb2f4711171ca925e0e05344a8452972a748a8a8ffdda1e1d72a7"}, + {file = 
"pymongo-3.13.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1c9d23f62a3fa7523d849c4942acc0d9ff7081ebc00c808ee7cfdc070df0687f"}, + {file = "pymongo-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:a17b81f22398e3e0f72bdf938e98c810286994b2bcc0a125cd5ad8fd4ea54ad7"}, + {file = "pymongo-3.13.0-cp27-cp27m-win_amd64.whl", hash = "sha256:4f6dd55dab77adf60b445c11f426ee5cdfa1b86f6d54cb937bfcbf09572333ab"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:776f90bf2252f90a4ae838e7917638894c6356bef7265f424592e2fd1f577d05"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:50b99f4d3eee6f03778fe841d6f470e6c18e744dc665156da6da3bc6e65b398d"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50a81b2d9f188c7909e0a1084fa969bb92a788076809c437ac1ae80393f46df9"}, + {file = "pymongo-3.13.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7c45a8a1a752002b0a7c81ab3a4c5e3b6f67f9826b16fbe3943f5329f565f24"}, + {file = "pymongo-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1037097708498bdc85f23c8798a5c46c7bce432d77d23608ff14e0d831f1a971"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:b5b733694e7df22d5c049581acfc487695a6ff813322318bed8dd66f79978636"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d7c91747ec8dde51440dd594603158cc98abb3f7df84b2ed8a836f138285e4fb"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:f4175fcdddf764d371ee52ec4505a40facee2533e84abf2953cda86d050cfa1f"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:93d4e9a02c17813b34e4bd9f6fbf07310c140c8f74341537c24d07c1cdeb24d1"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:3b261d593f2563299062733ae003a925420a86ff4ddda68a69097d67204e43f3"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:172db03182a22e9002157b262c1ea3b0045c73d4ff465adc152ce5b4b0e7b8d4"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09de3bfc995ae8cb955abb0c9ae963c134dba1b5622be3bcc527b89b0fd4091c"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0379447587ee4b8f983ba183202496e86c0358f47c45612619d634d1fcd82bd"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30245a8747dc90019a3c9ad9df987e0280a3ea632ad36227cde7d1d8dcba0830"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6fddf6a7b91da044f202771a38e71bbb9bf42720a406b26b25fe2256e7102"}, + {file = "pymongo-3.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5831a377d15a626fbec10890ffebc4c6abcd37e4126737932cd780a171eabdc1"}, + {file = "pymongo-3.13.0-cp310-cp310-win32.whl", hash = "sha256:944249aa83dee314420c37d0f40c30a8f6dc4a3877566017b87062e53af449f4"}, + {file = "pymongo-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea8824ebc9a1a5c8269e8f1e3989b5a6bec876726e2f3c33ebd036cb488277f0"}, + {file = "pymongo-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bdd34c57b4da51a7961beb33645646d197e41f8517801dc76b37c1441e7a4e10"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f9cc42a162faa241c82e117ac85734ae9f14343dc2df1c90c6b2181f791b22"}, + {file 
= "pymongo-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a82a1c10f5608e6494913faa169e213d703194bfca0aa710901f303be212414"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8927f22ef6a16229da7f18944deac8605bdc2c0858be5184259f2f7ce7fd4459"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6f8191a282ef77e526f8f8f63753a437e4aa4bc78f5edd8b6b6ed0eaebd5363"}, + {file = "pymongo-3.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d9ed67c987bf9ac2ac684590ba3d2599cdfb0f331ee3db607f9684469b3b59d"}, + {file = "pymongo-3.13.0-cp311-cp311-win32.whl", hash = "sha256:e8f6979664ff477cd61b06bf8aba206df7b2334209815ab3b1019931dab643d6"}, + {file = "pymongo-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:174fd1000e896d0dfbc7f6d7e6a1992a4868796c7dec31679e38218c78d6a942"}, + {file = "pymongo-3.13.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:d1ee773fb72ba024e7e3bb6ea8907fe52bccafcb5184aaced6bad995bd30ea20"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:28565e3dbd69fe5fe35a210067064dbb6ed5abe997079f653c19c873c3896fe6"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5c1db7d366004d6c699eb08c716a63ae0a3e946d061cbebea65d7ce361950265"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1956f3338c10308e2f99c2c9ff46ae412035cbcd7aaa76c39ccdb806854a247"}, + {file = "pymongo-3.13.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10f0fddc1d63ba3d4a4bffcc7720184c1b7efd570726ad5e2f55818da320239f"}, + {file = "pymongo-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:570ae3365b23d4fd8c669cb57613b1a90b2757e993588d3370ef90945dbeec4b"}, + {file = "pymongo-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:79f777eaf3f5b2c6d81f9ef00d87837001d7063302503bbcbfdbf3e9bc27c96f"}, + {file = "pymongo-3.13.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:d42eb29ba314adfd9c11234b4b646f61b0448bf9b00f14db4b317e6e4b947e77"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e5e87c0eb774561c546f979342a8ff36ebee153c60a0b6c6b03ba989ceb9538c"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f2c5a5984599a88d087a15859860579b825098b473d8c843f1979a83d159f2e"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:59c98e86c5e861032b71e6e5b65f23e6afaacea6e82483b66f1191a5021a7b4f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:70b67390e27e58876853efbb87e43c85252de2515e2887f7dd901b4fa3d21973"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:42ba8606492d76e6f9e4c7a458ed4bc712603be393259a52450345f0945da2cf"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:0e5536994cf2d8488c6fd9dea71df3c4dbb3e0d2ba5e695da06d9142a29a0969"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:fe8194f107f0fa3cabd14e9e809f174eca335993c1db72d1e74e0f496e7afe1f"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d593d50815771f517d3ac4367ff716e3f3c78edae51d98e1e25791459f8848ff"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5136ebe8da6a1604998a8eb96be55935aa5f7129c41cc7bddc400d48e8df43be"}, + {file = 
"pymongo-3.13.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a424bdedfd84454d2905a861e0d4bb947cc5bd024fdeb3600c1a97d2be0f4255"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5161167b3840e9c84c80f2534ea6a099f51749d5673b662a3dd248be17c3208"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644470442beaf969df99c4e00367a817eee05f0bba5d888f1ba6fe97b5e1c102"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2406df90b2335371706c59b7d79e9633b81ed2a7ecd48c1faf8584552bdf2d90"}, + {file = "pymongo-3.13.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:222591b828de10ac90064047b5d4916953f38c38b155009c4b8b5e0d33117c2b"}, + {file = "pymongo-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:7cb987b199fa223ad78eebaa9fbc183d5a5944bfe568a9d6f617316ca1c1f32f"}, + {file = "pymongo-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6cbb73d9fc2282677e2b7a137d13da987bd0b13abd88ed27bba5534c226db06"}, + {file = "pymongo-3.13.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b1223b826acbef07a7f5eb9bf37247b0b580119916dca9eae19d92b1290f5855"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:398fb86d374dc351a4abc2e24cd15e5e14b2127f6d90ce0df3fdf2adcc55ac1b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9c3d07ea19cd2856d9943dce37e75d69ecbb5baf93c3e4c82f73b6075c481292"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2943d739715f265a2983ac43747595b6af3312d0a370614040959fd293763adf"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c3b70ed82f20d18d22eafc9bda0ea656605071762f7d31f3c5afc35c59d3393b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:7ec2bb598847569ae34292f580842d37619eea3e546005042f485e15710180d5"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:8cc37b437cba909bef06499dadd91a39c15c14225e8d8c7870020049f8a549fe"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:65a063970e15a4f338f14b820561cf6cdaf2839691ac0adb2474ddff9d0b8b0b"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02f0e1a75d3bc0e16c7e15daf9c56185642be055e425f3b34888fc6eb1b22401"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e74b9c2aca2734c7f49f00fe68d6830a30d26df60e2ace7fe40ccb92087b94"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24e954be35ad4537840f20bbc8d75320ae647d3cb4fab12cb8fcd2d55f408e76"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a149377d1ff766fd618500798d0d94637f66d0ae222bb6d28f41f3e15c626297"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61660710b054ae52c8fc10368e91d74719eb05554b631d7f8ca93d21d2bff2e6"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bbc0d27dfef7689285e54f2e0a224f0c7cd9d5c46d2638fabad5500b951c92f"}, + {file = "pymongo-3.13.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b2ed9c3b30f11cd4a3fbfc22167af7987b01b444215c2463265153fe7cf66d6"}, + {file = "pymongo-3.13.0-cp37-cp37m-win32.whl", hash = 
"sha256:1c2c5e2b00e2fadcd590c0b2e293d71215e98ed1cb635cfca2be4998d197e534"}, + {file = "pymongo-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:32eac95bbb030b2376ffd897376c6f870222a3457f01a9ce466b9057876132f8"}, + {file = "pymongo-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a796ef39dadf9d73af05d24937644d386495e43a7d13617aa3651d836da542c8"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b6793baf4639c72a500698a49e9250b293e17ae1faf11ac1699d8141194786fe"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:80d8576b04d0824f63bf803190359c0d3bcb6e7fa63fefbd4bc0ceaa7faae38c"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:db2e11507fe9cc2a722be21ccc62c1b1295398fe9724c1f14900cdc7166fc0d7"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b01ce58eec5edeededf1992d2dce63fb8565e437be12d6f139d75b15614c4d08"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d1a19d6c5098f1f4e11430cd74621699453cbc534dd7ade9167e582f50814b19"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:7219b1a726ced3bacecabef9bd114529bbb69477901373e800d7d0140baadc95"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:2dae3b353a10c3767e0aa1c1492f2af388f1012b08117695ab3fd1f219e5814e"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12721d926d43d33dd3318e58dce9b0250e8a9c6e1093fa8e09f4805193ff4b43"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6af0a4b17faf26779d5caee8542a4f2cba040cea27d3bffc476cbc6ccbd4c8ee"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b9d0f5a445c7e0ddcc021b09835aa6556f0166afc498f57dfdd72cdf6f02ad"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db5b4f8ad8607a3d612da1d4c89a84e4cf5c88f98b46365820d9babe5884ba45"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dbf5fecf653c152edb75a35a8b15dfdc4549473484ee768aeb12c97983cead"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34cd48df7e1fc69222f296d8f69e3957eb7c6b5aa0709d3467184880ed7538c0"}, + {file = "pymongo-3.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f755ff1f4ab4ca790d1d6d3229006100b301475948021b6b2757822e0d6c97"}, + {file = "pymongo-3.13.0-cp38-cp38-win32.whl", hash = "sha256:b0746d0d4535f56bbaa63a8f6da362f330804d578e66e126b226eebe76c2bf00"}, + {file = "pymongo-3.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ad0515abb132f52ce9d8abd1a29681a1e65dba7b7fe13ea01e1a8db5715bf80"}, + {file = "pymongo-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c5cb6c93c94df76a879bad4b89db0104b01806d17c2b803c1316ba50962b6d6"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2e0854170813238f0c3131050c67cb1fb1ade75c93bf6cd156c1bd9a16095528"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1410faa51ce835cc1234c99ec42e98ab4f3c6f50d92d86a2d4f6e11c97ee7a4e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d7910135f5de1c5c3578e61d6f4b087715b15e365f11d4fa51a9cee92988b2bd"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:028175dd8d2979a889153a2308e8e500b3df7d9e3fd1c33ca7fdeadf61cc87a2"}, 
+ {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2bfc39276c0e6d07c95bd1088b5003f049e986e089509f7dbd68bb7a4b1e65ac"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4092b660ec720d44d3ca81074280dc25c7a3718df1b6c0fe9fe36ac6ed2833e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5bdeb71a610a7b801416268e500e716d0fe693fb10d809e17f0fb3dac5be5a34"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3bca8e76f5c00ed2bb4325e0e383a547d71595926d5275d7c88175aaf7435e"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c7cab8155f430ca460a6fc7ae8a705b34f3e279a57adb5f900eb81943ec777c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a32f3dfcca4a4816373bdb6256c18c78974ebb3430e7da988516cd95b2bd6e4"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ed2788a6ec68743e2040ab1d16573d7d9f6e7333e45070ce9268cbc93d148c"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e61a536ffed84d10376c21c13a6ed1ebefb61989a844952547c229d6aeedf3"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0665412dce26b2318092a33bd2d2327d487c4490cfcde158d6946d39b1e28d78"}, + {file = "pymongo-3.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64ed1a5ce5e5926727eb0f87c698c4d9a7a9f7b0953683a65e9ce2b7cc5f8e91"}, + {file = "pymongo-3.13.0-cp39-cp39-win32.whl", hash = "sha256:7593cb1214185a0c5b43b96effc51ce82ddc933298ee36db7dc2bd45d61b4adc"}, + {file = "pymongo-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:3cfc9bc1e8b5667bc1f3dbe46d2f85b3f24ff7533893bdc1203058012db2c046"}, + {file = "pymongo-3.13.0.tar.gz", hash = "sha256:e22d6cf5802cd09b674c307cc9e03870b8c37c503ebec3d25b86f2ce8c535dc7"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 9fd968f7..cbd312ac 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } @@ -14 +14 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } diff --git a/tools/PythonAudit.mk b/tools/PythonAudit.mk index 00d66819..c7f2f6f2 100644 --- a/tools/PythonAudit.mk +++ b/tools/PythonAudit.mk @@ -3 +3,2 @@ pip-audit: - bash -c 'poetry run pip-audit -r <(poetry export -f requirements.txt --with dev)' + bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d')" +# ^ we remove problematic lines to have a working pip-audit. 
See https://github.com/pypa/pip-audit/issues/84#issuecomment-1326203111 for "requests" diff --git a/workers/first_rows/Makefile b/workers/first_rows/Makefile index 1a830f5c..f70a7259 100644 --- a/workers/first_rows/Makefile +++ b/workers/first_rows/Makefile @@ -19 +19 @@ pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^libcache @/,+1 d' | sed '/^libcommon @/,+1 d' | sed '/^libqueue @/,+1 d')" + bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d' | sed '/^libcache @/,+1 d' | sed '/^libcommon @/,+1 d' | sed '/^libqueue @/,+1 d')" diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 58ee5025..5e9fd78a 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -422,0 +423,12 @@ graph = ["objgraph (>=1.7.2)"] +[[package]] +name = "dnspython" +version = "1.16.0" +description = "DNS toolkit" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +dnssec = ["ecdsa (>=0.13)", "pycryptodome"] +idna = ["idna (>=2.1)"] + @@ -860 +872 @@ name = "libcache" -version = "0.4.1" +version = "0.4.3" @@ -870,0 +883 @@ mongoengine = ">=0.24.1,<0.25.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -874 +887 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" @@ -902 +915 @@ name = "libqueue" -version = "0.4.11" +version = "0.4.13" @@ -913,0 +927 @@ psutil = ">=5.9.2,<6.0.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -917 +931 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" @@ -1591,0 +1606,3 @@ python-versions = "*" +[package.dependencies] +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} + @@ -2443 +2460 @@ python-versions = "3.9.6" -content-hash = "b796826426df1bb68b8b652b369c0500dacdee574fca3f18a480cc74b7462eb4" +content-hash = "6e12ec014e04f388b4a2fdd4af6be9075e9ecf7aa38f49ec9c167e2c2d2ed33f" @@ -2982,0 +3000,4 @@ dill = [ +dnspython = [ + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, +] @@ -3262 +3283 @@ libcache = [ - {file = "libcache-0.4.1-py3-none-any.whl", hash = "sha256:cbd2dc8050d96a933fd5806d1153b7d3deee828b80a47ddb33782f712ecf6483"}, + {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, @@ -3280 +3301 @@ libqueue = [ - {file = "libqueue-0.4.11-py3-none-any.whl", hash = "sha256:4bc6f021571e4f1b2bd0e3062677d20809335efd6715532f6099ba76a3c3a8a1"}, + {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 6439d116..b194f4ad 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcache = { path = 
"../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } @@ -22 +22 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false } diff --git a/workers/splits/Makefile b/workers/splits/Makefile index d223cf9a..7fc2004b 100644 --- a/workers/splits/Makefile +++ b/workers/splits/Makefile @@ -19 +19 @@ pip-audit: - bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" + bash -c "poetry run pip-audit -r <(poetry export -f requirements.txt --with dev | sed '/^pymongo==/,+110 d' | sed '/^requests==2.28.1 ;/,+2 d' | sed '/^kenlm @/d' | sed '/^trec-car-tools @/d')" diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 58ee5025..5e9fd78a 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -422,0 +423,12 @@ graph = ["objgraph (>=1.7.2)"] +[[package]] +name = "dnspython" +version = "1.16.0" +description = "DNS toolkit" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +dnssec = ["ecdsa (>=0.13)", "pycryptodome"] +idna = ["idna (>=2.1)"] + @@ -860 +872 @@ name = "libcache" -version = "0.4.1" +version = "0.4.3" @@ -870,0 +883 @@ mongoengine = ">=0.24.1,<0.25.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -874 +887 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl" @@ -902 +915 @@ name = "libqueue" -version = "0.4.11" +version = "0.4.13" @@ -913,0 +927 @@ psutil = ">=5.9.2,<6.0.0" +pymongo = {version = ">=3.13.0,<4.0.0", extras = ["srv"]} @@ -917 +931 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl" @@ -1591,0 +1606,3 @@ python-versions = "*" +[package.dependencies] +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} + @@ -2443 +2460 @@ python-versions = "3.9.6" -content-hash = "b796826426df1bb68b8b652b369c0500dacdee574fca3f18a480cc74b7462eb4" +content-hash = "6e12ec014e04f388b4a2fdd4af6be9075e9ecf7aa38f49ec9c167e2c2d2ed33f" @@ -2982,0 +3000,4 @@ dill = [ +dnspython = [ + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, +] @@ -3262 +3283 @@ libcache = [ - {file = "libcache-0.4.1-py3-none-any.whl", hash = "sha256:cbd2dc8050d96a933fd5806d1153b7d3deee828b80a47ddb33782f712ecf6483"}, + {file = "libcache-0.4.3-py3-none-any.whl", hash = "sha256:d21c278aa72be395fe5a541eadfab3e33e9565f821a58aa161386f1ec9346041"}, @@ -3280 +3301 @@ libqueue = [ - {file = "libqueue-0.4.11-py3-none-any.whl", hash = "sha256:4bc6f021571e4f1b2bd0e3062677d20809335efd6715532f6099ba76a3c3a8a1"}, + {file = "libqueue-0.4.13-py3-none-any.whl", hash = "sha256:6aeec523eff05f1ee07cbc6bcf870ec301e5ce32913964f6fc385760d2ef954c"}, diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 4033188c..ec39e07f 100644 --- 
a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.3-py3-none-any.whl", develop = false } @@ -22 +22 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.3-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.11-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.13-py3-none-any.whl", develop = false }
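The lock-file changes above pin pymongo to the 3.13 line with the "srv" extra, which is what pulls in the dnspython (>=1.16.0,<1.17.0) dependency recorded in the diff: pymongo relies on dnspython to resolve mongodb+srv:// connection strings via DNS SRV records. A minimal sketch of what that extra enables (the URI is a placeholder, not anything from the repository):

from pymongo import MongoClient

# With pymongo[srv] installed, dnspython is present and "mongodb+srv://" URIs
# are resolved through DNS SRV lookups; without dnspython, constructing this
# client raises a ConfigurationError. Placeholder credentials and host.
client = MongoClient("mongodb+srv://user:[email protected]/admin")
print(client.server_info()["version"])  # round-trip to confirm connectivity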
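The same commit also works around a pip-audit failure by stripping the pinned pymongo block from the exported requirements before auditing: sed's '/^pymongo==/,+110 d' deletes the matching requirement line plus the 110 lines of wheel hashes that follow it. A rough Python equivalent of that filter, shown only to make the sed semantics explicit (this script is not part of the repository):

import sys

# Drop the "pymongo==..." requirement line and its 110 continuation lines
# of wheel hashes, passing every other line through unchanged.
skip = 0
for line in sys.stdin:
    if line.startswith("pymongo=="):
        skip = 111  # the matched line itself plus the next 110 lines
    if skip > 0:
        skip -= 1
        continue
    sys.stdout.write(line)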
8a808088a847dbdeee1ce2bc4909708a797b6d0c
Sylvain Lesage
2022-11-24T12:06:20
feat: 🎸 upgrade to datasets 2.7.1 (#646)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index f3713f6f..5b47d01f 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12,2 +12,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-4a48536", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-4a48536" + "splits": "huggingface/datasets-server-workers-splits:sha-2bb27cb", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-2bb27cb" diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 5769897f..58ee5025 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -823,0 +824 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" + @@ -2442 +2443 @@ python-versions = "3.9.6" -content-hash = "a1e1b7767291bb8b0c8e539245532a35e0494ef5926280d5071c3566500e3179" +content-hash = "b796826426df1bb68b8b652b369c0500dacdee574fca3f18a480cc74b7462eb4" diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 9f22734a..6439d116 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "~2.7.0" } +datasets = { extras = ["audio", "vision"], version = "~2.7.1" } diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 5769897f..58ee5025 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -823,0 +824 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" + @@ -2442 +2443 @@ python-versions = "3.9.6" -content-hash = "a1e1b7767291bb8b0c8e539245532a35e0494ef5926280d5071c3566500e3179" +content-hash = "b796826426df1bb68b8b652b369c0500dacdee574fca3f18a480cc74b7462eb4" diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 7da9401b..4033188c 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "~2.7.0" } +datasets = { extras = ["audio", "vision"], version = "~2.7.1" }
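This bump only raises the patch floor: in Poetry, the tilde constraint "~2.7.1" expands to ">=2.7.1,<2.8.0", so 2.7.0 is now excluded while later 2.7.x releases stay eligible. A small illustration using the packaging library (not code from the repository):

from packaging.specifiers import SpecifierSet

# PEP 440 equivalent of Poetry's "~2.7.1" tilde requirement.
spec = SpecifierSet(">=2.7.1,<2.8.0")
print("2.7.0" in spec)  # False: the previous floor is excluded
print("2.7.1" in spec)  # True
print("2.8.0" in spec)  # False: the next minor release is still blocked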
18a51afe1c1c89ef796d9b5916d1691a853ad325
Sylvain Lesage
2022-11-24T11:24:03
Replace safety with pip audit (#645)
38070c7058294dc2401bbcafa80559598ffe7c17
Sylvain Lesage
2022-11-21T16:01:51
feat: 🎸 upgrade datasets (#644)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 6a9bbea2..e38d9364 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -12,2 +12,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-dfa89b1", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-dfa89b1" + "splits": "huggingface/datasets-server-workers-splits:sha-a0d80a3", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-319bbb8" diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index e5068ec5..8ba4d800 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -11,0 +12,2 @@ + - name: HF_ENDPOINT # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 + value: {{ .Values.common.hfEndpoint | quote }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index e7a0a306..f9d155d0 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -11,0 +12,2 @@ + - name: HF_ENDPOINT # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 + value: {{ .Values.common.hfEndpoint | quote }} diff --git a/e2e/tests/fixtures/hub.py b/e2e/tests/fixtures/hub.py index 111580bf..b9caf969 100644 --- a/e2e/tests/fixtures/hub.py +++ b/e2e/tests/fixtures/hub.py @@ -28 +27,0 @@ CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co" -CI_HUB_DATASETS_URL = CI_HUB_ENDPOINT + "/datasets/{repo_id}/resolve/{revision}/{path}" diff --git a/services/admin/tests/fixtures/hub.py b/services/admin/tests/fixtures/hub.py index 16395071..a4ce36ed 100644 --- a/services/admin/tests/fixtures/hub.py +++ b/services/admin/tests/fixtures/hub.py @@ -24 +23,0 @@ CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co" -CI_HUB_DATASETS_URL = CI_HUB_ENDPOINT + "/datasets/{repo_id}/resolve/{revision}/{path}" diff --git a/tools/docker-compose-datasets-server-from-local-code.yml b/tools/docker-compose-datasets-server-from-local-code.yml index ac7272af..eab90c44 100644 --- a/tools/docker-compose-datasets-server-from-local-code.yml +++ b/tools/docker-compose-datasets-server-from-local-code.yml @@ -104,0 +105 @@ services: + HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 @@ -132,0 +134 @@ services: + HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 diff --git a/tools/docker-compose-datasets-server-from-remote-images.yml b/tools/docker-compose-datasets-server-from-remote-images.yml index 12612786..70728814 100644 --- a/tools/docker-compose-datasets-server-from-remote-images.yml +++ b/tools/docker-compose-datasets-server-from-remote-images.yml @@ -98,0 +99 @@ services: + HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 @@ -124,0 +126 @@ services: + HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} # see https://github.com/huggingface/datasets/pull/5196#issuecomment-1322191411 diff --git a/workers/first_rows/README.md b/workers/first_rows/README.md index 6bd22919..6812892d 100644 --- a/workers/first_rows/README.md +++ b/workers/first_rows/README.md @@ -7 +7 @@ -The worker con be configured using environment variables. They are grouped by scope. 
+The worker can be configured using environment variables. They are grouped by scope. @@ -26,0 +27,2 @@ The following environment variables are used to configure two dependencies: the +If the Hub is not https://huggingface.co (i.e. if you set the `COMMON_HF_ENDPOINT` environment variable), you should also set the `HF_ENDPOINT` environment variable to the same value. See https://github.com/huggingface/datasets/pull/5196 for more details. + diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index a92b0be1..c1172798 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -323 +323 @@ name = "datasets" -version = "2.6.1" +version = "2.7.0" @@ -331 +331 @@ aiohttp = "*" -dill = "<0.3.6" +dill = "<0.3.7" @@ -351 +351 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "spacy (>=3.0.0)", "tldextract", "toml (>=0.10.1)", "typer (<0.5.0)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos"] @@ -355 +355 @@ s3 = ["fsspec", "boto3", "botocore", "s3fs"] -tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)"] +tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] @@ -357 +357 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file 
(>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "spacy (>=3.0.0)", "tldextract", "toml (>=0.10.1)", "typer (<0.5.0)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos"] @@ -2286 +2286 @@ python-versions = "3.9.6" -content-hash = "eb01a846e042f679dcf4bd26176654610b489950f9d6c442347bf814080b8da2" +content-hash = "616b43e42f6a9596a385c0883ef97f588224c80e8e233352759d5f33dc35f5df" diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 4b82bd4c..f2960c1a 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "~2.6.0" } +datasets = { extras = ["audio", "vision"], version = "~2.7.0" } diff --git a/workers/first_rows/src/first_rows/config.py b/workers/first_rows/src/first_rows/config.py index a885cea5..b9819777 100644 --- a/workers/first_rows/src/first_rows/config.py +++ b/workers/first_rows/src/first_rows/config.py @@ -46 +45,0 @@ class WorkerConfig: - datasets.config.HUB_DATASETS_URL = self.common.hf_endpoint + "/datasets/{repo_id}/resolve/{revision}/{path}" @@ -50,0 +50,5 @@ class WorkerConfig: + + # Note: self.common.hf_endpoint is ignored by the huggingface_hub library for now (see + # the discussion at https://github.com/huggingface/datasets/pull/5196), and this breaks + # various of the datasets functions. The fix, for now, is to set the HF_ENDPOINT + # environment variable to the desired value. diff --git a/workers/first_rows/tests/fixtures/hub.py b/workers/first_rows/tests/fixtures/hub.py index cb2e8374..2129014d 100644 --- a/workers/first_rows/tests/fixtures/hub.py +++ b/workers/first_rows/tests/fixtures/hub.py @@ -29 +29,9 @@ CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co" -CI_HUB_DATASETS_URL = CI_HUB_ENDPOINT + "/datasets/{repo_id}/resolve/{revision}/{path}" +CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE = CI_HUB_ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}" + + [email protected](autouse=True) +def ci_hfh_hf_hub_url(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr( + "huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE", CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE + ) + @@ -33 +40,0 @@ datasets.config.HF_ENDPOINT = CI_HUB_ENDPOINT -datasets.config.HUB_DATASETS_URL = CI_HUB_DATASETS_URL diff --git a/workers/splits/README.md b/workers/splits/README.md index f87fd5f4..27e20adc 100644 --- a/workers/splits/README.md +++ b/workers/splits/README.md @@ -7 +7 @@ -The worker con be configured using environment variables. They are grouped by scope. +The worker can be configured using environment variables. They are grouped by scope. 
@@ -16,0 +17,2 @@ The following environment variables are used to configure two dependencies: the +If the Hub is not https://huggingface.co (i.e. if you set the `COMMON_HF_ENDPOINT` environment variable), you should also set the `HF_ENDPOINT` environment variable to the same value. See https://github.com/huggingface/datasets/pull/5196 for more details. + diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 5d58b35d..ba7fe8ca 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -323 +323 @@ name = "datasets" -version = "2.6.1" +version = "2.7.0" @@ -331 +331 @@ aiohttp = "*" -dill = "<0.3.6" +dill = "<0.3.7" @@ -351 +351 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "spacy (>=3.0.0)", "tldextract", "toml (>=0.10.1)", "typer (<0.5.0)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos"] @@ -355 +355 @@ s3 = ["fsspec", "boto3", "botocore", "s3fs"] -tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)"] +tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] @@ -357 +357 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six 
(>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "spacy (>=3.0.0)", "tldextract", "toml (>=0.10.1)", "typer (<0.5.0)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos"] @@ -2286 +2286 @@ python-versions = "3.9.6" -content-hash = "c728e53c216c19c8a2e8e63b01dc1675e15e040e3b0166c7a9716bba283bf829" +content-hash = "616b43e42f6a9596a385c0883ef97f588224c80e8e233352759d5f33dc35f5df" diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 1621d42a..d04ca7f9 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "~2.6.1" } +datasets = { extras = ["audio", "vision"], version = "~2.7.0" } diff --git a/workers/splits/src/splits/config.py b/workers/splits/src/splits/config.py index 63422109..927d48d8 100644 --- a/workers/splits/src/splits/config.py +++ b/workers/splits/src/splits/config.py @@ -26 +25,0 @@ class WorkerConfig: - datasets.config.HUB_DATASETS_URL = self.common.hf_endpoint + "/datasets/{repo_id}/resolve/{revision}/{path}" @@ -30,0 +30,5 @@ class WorkerConfig: + + # Note: self.common.hf_endpoint is ignored by the huggingface_hub library for now (see + # the discussion at https://github.com/huggingface/datasets/pull/5196), and this breaks + # various of the datasets functions. The fix, for now, is to set the HF_ENDPOINT + # environment variable to the desired value. diff --git a/workers/splits/tests/fixtures/hub.py b/workers/splits/tests/fixtures/hub.py index c0c04ad8..0bfe2485 100644 --- a/workers/splits/tests/fixtures/hub.py +++ b/workers/splits/tests/fixtures/hub.py @@ -29 +29,9 @@ CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co" -CI_HUB_DATASETS_URL = CI_HUB_ENDPOINT + "/datasets/{repo_id}/resolve/{revision}/{path}" +CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE = CI_HUB_ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}" + + [email protected](autouse=True) +def ci_hfh_hf_hub_url(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr( + "huggingface_hub.file_download.HUGGINGFACE_CO_URL_TEMPLATE", CI_HFH_HUGGINGFACE_CO_URL_TEMPLATE + ) + @@ -33 +40,0 @@ datasets.config.HF_ENDPOINT = CI_HUB_ENDPOINT -datasets.config.HUB_DATASETS_URL = CI_HUB_DATASETS_URL
d17a26ea9ffdaac0d66cd278e03cb562eb93fcf0
Sylvain Lesage
2022-11-18T12:12:29
Refactor common cache entry (#634)
diff --git a/.vscode/monorepo.code-workspace b/.vscode/monorepo.code-workspace index 6a144da6..b4ce202d 100644 --- a/.vscode/monorepo.code-workspace +++ b/.vscode/monorepo.code-workspace @@ -19,4 +18,0 @@ - { - "name": "libs/libqueue", - "path": "../libs/libqueue" - }, @@ -26,0 +23,4 @@ + { + "name": "libs/libqueue", + "path": "../libs/libqueue" + }, @@ -39,4 +38,0 @@ - { - "name": "workers/splits", - "path": "../workers/splits" - }, @@ -45,0 +42,4 @@ + }, + { + "name": "workers/splits", + "path": "../workers/splits" diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 093e67b8..6a9bbea2 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-61c45d0" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-dfa89b1" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-a1de302", - "api": "huggingface/datasets-server-services-api:sha-61c45d0" + "admin": "huggingface/datasets-server-services-admin:sha-dfa89b1", + "api": "huggingface/datasets-server-services-api:sha-dfa89b1" @@ -12,2 +12,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-61c45d0", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-61c45d0" + "splits": "huggingface/datasets-server-workers-splits:sha-dfa89b1", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-dfa89b1" diff --git a/chart/templates/jobs/mongodb-migration/job.yaml b/chart/templates/jobs/mongodb-migration/job.yaml index c2f81e0f..e11dbb25 100644 --- a/chart/templates/jobs/mongodb-migration/job.yaml +++ b/chart/templates/jobs/mongodb-migration/job.yaml @@ -13 +13 @@ metadata: - "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded + "helm.sh/hook-delete-policy": before-hook-creation diff --git a/e2e/tests/test_31_admin_metrics.py b/e2e/tests/test_31_admin_metrics.py index 504aa35b..8d4e2e6e 100644 --- a/e2e/tests/test_31_admin_metrics.py +++ b/e2e/tests/test_31_admin_metrics.py @@ -33 +33 @@ def test_metrics(): - for endpoint in ["/splits", "/first-rows"]: + for queue in ["/splits", "/first-rows"]: @@ -36,2 +36,3 @@ def test_metrics(): - name="queue_jobs_total", labels={"pid": "[0-9]*", "queue": endpoint, "status": "started"}, metrics=metrics - ), f"queue_jobs_total - endpoint={endpoint} not found in {metrics}" + name="queue_jobs_total", labels={"pid": "[0-9]*", "queue": queue, "status": "started"}, metrics=metrics + ), f"queue_jobs_total - queue={queue} not found in {metrics}" + for cache_kind in ["/splits", "/first-rows"]: @@ -42 +43 @@ def test_metrics(): - labels={"error_code": "None", "http_status": "200", "path": endpoint, "pid": "[0-9]*"}, + labels={"error_code": "None", "http_status": "200", "kind": cache_kind, "pid": "[0-9]*"}, @@ -44 +45 @@ def test_metrics(): - ), f"responses_in_cache_total - endpoint {endpoint} not found in {metrics}" + ), f"responses_in_cache_total - cache kind {cache_kind} not found in {metrics}" diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index 5436af28..6bf49116 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -237 +237 @@ name = "libcache" -version = "0.3.4" +version = "0.4.0" @@ -252 +252 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl" @@ -677 +677 @@ python-versions = "3.9.6" -content-hash = 
"27f59b12b7c1b7cc620d908dd09859ce5c59393bac5a13587773838bf49f4a99" +content-hash = "ca4d205575932710b63e247f3e14064e2cb3b727eb50167682b80a5232a87540" @@ -724 +724 @@ libcache = [ - {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, + {file = "libcache-0.4.0-py3-none-any.whl", hash = "sha256:fd89c5935b219a67783283f35611f61d983b6df8dc79d687c470b3fb9754741d"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index 2aa153e6..8123d4fb 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl", develop = false } diff --git a/jobs/mongodb_migration/src/mongodb_migration/check.py b/jobs/mongodb_migration/src/mongodb_migration/check.py index 180d0042..1eb021be 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/check.py +++ b/jobs/mongodb_migration/src/mongodb_migration/check.py @@ -4,0 +5 @@ +import logging @@ -35,16 +36,20 @@ def check_documents(DocCls: DocumentClass, sample_size: int, custom_validation: - # general validation (types and values) - doc.validate() - - # load all subfields, - # this may trigger additional queries if you have ReferenceFields - # so it may be slow - for field in doc._fields: - try: - getattr(doc, field) - except Exception: - print(f"Could not load field {field} in Document {doc.id}") - raise - - # custom validation - if custom_validation is not None: - custom_validation(doc) + try: + # general validation (types and values) + doc.validate() + + # load all subfields, + # this may trigger additional queries if you have ReferenceFields + # so it may be slow + for field in doc._fields: + try: + getattr(doc, field) + except Exception: + logging.error(f"Could not load field {field} in Document {doc.id}. Document: {doc.to_json()}") + raise + + # custom validation + if custom_validation is not None: + custom_validation(doc) + except Exception as e: + logging.error(f"Validation error on document {doc.id}: {e}. 
Document: {doc.to_json()}") + raise e diff --git a/jobs/mongodb_migration/src/mongodb_migration/collector.py b/jobs/mongodb_migration/src/mongodb_migration/collector.py index b7091191..01ce2590 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/collector.py +++ b/jobs/mongodb_migration/src/mongodb_migration/collector.py @@ -10,0 +11,3 @@ from mongodb_migration.migrations._20221116133500_queue_job_add_force import ( +from mongodb_migration.migrations._20221117223000_cache_generic_response import ( + MigrationMoveToGenericCachedResponse, +) @@ -20,0 +24,4 @@ class MigrationsCollector: + MigrationMoveToGenericCachedResponse( + version="20221117223000", + description="replace SplitsResponse and FirstRowsResponse with a generic CachedResponse", + ), diff --git a/jobs/mongodb_migration/src/mongodb_migration/main.py b/jobs/mongodb_migration/src/mongodb_migration/main.py index 8773d957..10838526 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/main.py +++ b/jobs/mongodb_migration/src/mongodb_migration/main.py @@ -3,0 +4,2 @@ +import sys + @@ -11 +13,5 @@ if __name__ == "__main__": - Plan(collected_migrations=collected_migrations).execute() + try: + Plan(collected_migrations=collected_migrations).execute() + sys.exit(0) + except Exception: + sys.exit(1) diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221117223000_cache_generic_response.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221117223000_cache_generic_response.py new file mode 100644 index 00000000..273c946c --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221117223000_cache_generic_response.py @@ -0,0 +1,204 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +import types +from datetime import datetime, timezone +from enum import Enum +from http import HTTPStatus +from typing import Generic, Type, TypeVar + +from bson import ObjectId +from mongoengine import Document +from mongoengine.connection import get_db +from mongoengine.fields import ( + DateTimeField, + DictField, + EnumField, + ObjectIdField, + StringField, +) +from mongoengine.queryset.queryset import QuerySet + +from mongodb_migration.check import check_documents +from mongodb_migration.migration import Migration + + +class CacheKind(Enum): + SPLITS = "/splits" + FIRST_ROWS = "/first-rows" + + +db_name = "cache" +splitsResponseCollection = "splitsResponse" +firstRowsResponseCollection = "firstRowsResponse" +cachedResponseCollection = "cachedResponsesBlue" + + +# connection already occurred in the main.py (caveat: we use globals) +class MigrationMoveToGenericCachedResponse(Migration): + def up(self) -> None: + # See https://docs.mongoengine.org/guide/migration.html#example-1-addition-of-a-field + logging.info( + f"Create the {cachedResponseCollection} collection, and fill it with the data from splits and first-rows" + ) + db = get_db(db_name) + # Copy the data from the previous collections (splitsResponse, firstRowsResponse) to + # the new generic collection (cachedResponse) + for splits_response in db[splitsResponseCollection].find(): + if not isinstance(splits_response, dict): + # for mypy + raise ValueError("splits_response should be a dict") + db[cachedResponseCollection].insert_one( + { + "_id": splits_response.get("_id"), + "kind": CacheKind.SPLITS.value, + # ^ "kind" is a new field + "dataset": splits_response.get("dataset_name"), + "config": None, + "split": None, + # ^ "config" and "split" are None for kind=/splits + "http_status": 
splits_response.get("http_status"), + "error_code": splits_response.get("error_code"), + "content": splits_response.get("response"), + # ^ "response" field has been renamed to "content" + "worker_version": splits_response.get("worker_version"), + "dataset_git_revision": splits_response.get("dataset_git_revision"), + "details": splits_response.get("details"), + "updated_at": splits_response.get("updated_at"), + # "stale" field is not used anymore + } + ) + for first_rows_response in db[firstRowsResponseCollection].find(): + if not isinstance(first_rows_response, dict): + # for mypy + raise ValueError("first_rows_response should be a dict") + db[cachedResponseCollection].insert_one( + { + "_id": first_rows_response.get("_id"), + "kind": CacheKind.FIRST_ROWS.value, + # ^ "kind" is a new field + "dataset": first_rows_response.get("dataset_name"), + "config": first_rows_response.get("config_name"), + "split": first_rows_response.get("split_name"), + # ^ "config" and "split" are None for kind=/splits + "http_status": first_rows_response.get("http_status"), + "error_code": first_rows_response.get("error_code"), + "content": first_rows_response.get("response"), + # ^ "response" field has been renamed to "content" + "worker_version": first_rows_response.get("worker_version"), + "dataset_git_revision": first_rows_response.get("dataset_git_revision"), + "details": first_rows_response.get("details"), + "updated_at": first_rows_response.get("updated_at"), + # "stale" field is not used anymore + } + ) + # We will not delete the old collections for now. It will be made in a later migration. + # Also: no need to create indexes on the new collection, mongoengine will do it automatically on the next + # request. + + def down(self) -> None: + logging.info(f"Delete the {cachedResponseCollection} collection") + db = get_db(db_name) + db[cachedResponseCollection].drop() + + def validate(self) -> None: + logging.info("Validate the migrated documents") + + def custom_validation(doc: CachedResponseSnapshot) -> None: + if doc.kind not in (CacheKind.SPLITS.value, CacheKind.FIRST_ROWS.value): + raise ValueError("kind should be /splits or /first-rows") + + check_documents(DocCls=CachedResponseSnapshot, sample_size=10, custom_validation=custom_validation) + + db = get_db(db_name) + splits_responses_count = db[splitsResponseCollection].count_documents({}) + first_rows_responses_count = db[firstRowsResponseCollection].count_documents({}) + cached_responses_count = CachedResponseSnapshot.objects.count() + if splits_responses_count + first_rows_responses_count > cached_responses_count: + raise ValueError( + f"Some documents are missing in the new collection: splitsResponse ({splits_responses_count})," + f" firstRowsResponse ({first_rows_responses_count}), cachedResponseBlue ({cached_responses_count})" + ) + + +# --- CachedResponseSnapshot --- + +# START monkey patching ### hack ### +# see https://github.com/sbdchd/mongo-types#install +U = TypeVar("U", bound=Document) + + +def no_op(self, x): # type: ignore + return self + + +QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet) + + +class QuerySetManager(Generic[U]): + def __get__(self, instance: object, cls: Type[U]) -> QuerySet[U]: + return QuerySet(cls, cls._get_collection()) + + +# END monkey patching ### hack ### + + +def get_datetime() -> datetime: + return datetime.now(timezone.utc) + + +# cache of any endpoint +class CachedResponseSnapshot(Document): + """A response to an endpoint request, cached in the mongoDB database + + Args: + kind (`str`): The kind 
of the cached response, identifies the endpoint + dataset (`str`): The requested dataset. + config (`str`, optional): The requested config, if any. + split (`str`, optional): The requested split, if any. + http_status (`HTTPStatus`): The HTTP status code. + error_code (`str`, optional): The error code, if any. + content (`dict`): The content of the cached response. Can be an error or a valid content. + details (`dict`, optional): Additional details, eg. a detailed error that we don't want to send as a response. + updated_at (`datetime`): When the cache entry has been last updated. + worker_version (`str`): The semver version of the worker that cached the response. + dataset_git_revision (`str`): The commit (of the git dataset repo) used to generate the response. + """ + + id = ObjectIdField(db_field="_id", primary_key=True, default=ObjectId) + + kind = StringField(required=True, unique_with=["dataset", "config", "split"]) + dataset = StringField(required=True) + config = StringField() + split = StringField() + + http_status = EnumField(HTTPStatus, required=True) + error_code = StringField() + content = DictField(required=True) + worker_version = StringField() + dataset_git_revision = StringField() + + details = DictField() + updated_at = DateTimeField(default=get_datetime) + + meta = { + "collection": cachedResponseCollection, + "db_alias": db_name, + "indexes": [ + ("kind", "dataset", "config", "split"), + ("dataset", "kind", "http_status"), + ("kind", "http_status", "dataset"), + ("kind", "http_status", "error_code"), + ("kind", "id"), + ], + } + objects = QuerySetManager["CachedResponseSnapshot"]() + + +# Fix issue with mongoengine: https://github.com/MongoEngine/mongoengine/issues/1242#issuecomment-810501601 +# mongoengine automatically sets "config" and "splits" as required fields, because they are listed in the unique_with +# field of the "kind" field. But it's an error, since unique indexes (which are used to enforce unique_with) accept +# null values, see https://www.mongodb.com/docs/v5.0/core/index-unique/#unique-index-and-missing-field. 
+CachedResponseSnapshot.config.required = False # type: ignore +CachedResponseSnapshot.split.required = False # type: ignore diff --git a/jobs/mongodb_migration/src/mongodb_migration/plan.py b/jobs/mongodb_migration/src/mongodb_migration/plan.py index b0012836..81707874 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/plan.py +++ b/jobs/mongodb_migration/src/mongodb_migration/plan.py @@ -55,0 +56,2 @@ class Plan: + raise e + # ^ the script must stop with an error code diff --git a/jobs/mongodb_migration/tests/test_plan.py b/jobs/mongodb_migration/tests/test_plan.py index c7503ffc..f83b5df9 100644 --- a/jobs/mongodb_migration/tests/test_plan.py +++ b/jobs/mongodb_migration/tests/test_plan.py @@ -4 +3,0 @@ -import datetime @@ -8,2 +6,0 @@ import pytest -from libcache.simple_cache import SplitsResponse -from libqueue.queue import Job, Status @@ -11 +7,0 @@ from libqueue.queue import Job, Status -from mongodb_migration.check import check_documents @@ -120,2 +116,2 @@ def test_collected_migrations_order_dont_matter(collected_migrations: List[Migra - ([migration_error_in_up], [], None), - ([migration_error_in_validate], [], None), + ([migration_error_in_up], [], RuntimeError), + ([migration_error_in_validate], [], RuntimeError), @@ -124 +120 @@ def test_collected_migrations_order_dont_matter(collected_migrations: List[Migra - ([migration_ok_a, migration_error_in_up], [], None), + ([migration_ok_a, migration_error_in_up], [], RuntimeError), @@ -205,59 +200,0 @@ def test_execute_is_idempotent(): - - -def test_queue_and_cache(): - # prepare - for i in range(100): - Job( - type="queue_a", - dataset=f"dataset{i}", - config="config", - split="split", - unicity_id=f"abc{str(i)}", - namespace="dataset", - created_at=datetime.datetime.now(), - status=Status.WAITING, - ).save() - # Remove the field "stale", to simulate that we add it now - splits_response_collection = SplitsResponse._get_collection() - splits_response_collection.update_many({}, {"$unset": {"stale": False}}) - - class MigrationQueue(Migration): - def up(self) -> None: - job_collection = Job._get_collection() - job_collection.update_many({}, {"$set": {"status": Status.CANCELLED.value}}) - - def down(self) -> None: - raise IrreversibleMigration() - - def validate(self) -> None: - def custom_validation(doc: Job) -> None: - if doc.status != Status.CANCELLED: - raise ValueError("status is not cancelled") - - check_documents(DocCls=Job, sample_size=10, custom_validation=custom_validation) - if Job.objects(unicity_id="abc0").count() != 1: - raise ValueError('Job "abc0" not found') - - class MigrationCache(Migration): - def up(self) -> None: - splits_response_collection = SplitsResponse._get_collection() - splits_response_collection.update_many({}, {"$set": {"stale": False}}) - - def down(self) -> None: - splits_response_collection = SplitsResponse._get_collection() - splits_response_collection.update_many({}, {"$unset": {"stale": False}}) - - def validate(self) -> None: - def custom_validation(doc: SplitsResponse) -> None: - if not hasattr(doc, "stale"): - raise ValueError("Missing field 'stale'") - - check_documents(DocCls=SplitsResponse, sample_size=10, custom_validation=custom_validation) - - plan = Plan( - collected_migrations=[ - MigrationQueue(version="20221114223000", description="cancel jobs"), - MigrationCache(version="20221114223001", description="add stale field"), - ] - ) - plan.execute() diff --git a/libs/libcache/dist/libcache-0.4.0-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.0-py3-none-any.whl new file mode 100644 
index 00000000..06a00313 Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.0-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.4.0.tar.gz b/libs/libcache/dist/libcache-0.4.0.tar.gz new file mode 100644 index 00000000..62741a68 Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.0.tar.gz differ diff --git a/libs/libcache/dist/libcache-0.4.0a0-py3-none-any.whl b/libs/libcache/dist/libcache-0.4.0a0-py3-none-any.whl new file mode 100644 index 00000000..8d6bbd7f Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.0a0-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.4.0a0.tar.gz b/libs/libcache/dist/libcache-0.4.0a0.tar.gz new file mode 100644 index 00000000..599e1e82 Binary files /dev/null and b/libs/libcache/dist/libcache-0.4.0a0.tar.gz differ diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml index ed1dfe0b..c25c6824 100644 --- a/libs/libcache/pyproject.toml +++ b/libs/libcache/pyproject.toml @@ -5 +5 @@ name = "libcache" -version = "0.3.4" +version = "0.4.0" diff --git a/libs/libcache/src/libcache/simple_cache.py b/libs/libcache/src/libcache/simple_cache.py index 3c63e531..ec529df0 100644 --- a/libs/libcache/src/libcache/simple_cache.py +++ b/libs/libcache/src/libcache/simple_cache.py @@ -7 +7 @@ from http import HTTPStatus -from typing import Any, Dict, Generic, List, Optional, Tuple, Type, TypedDict, TypeVar +from typing import Dict, Generic, List, Optional, Set, Type, TypedDict, TypeVar @@ -13 +12,0 @@ from mongoengine.fields import ( - BooleanField, @@ -50,23 +49,3 @@ def get_datetime() -> datetime: -# cache of the /splits endpoint -class SplitsResponse(Document): - id = ObjectIdField(db_field="_id", primary_key=True, default=ObjectId) - dataset_name = StringField(required=True, unique=True) - http_status = EnumField(HTTPStatus, required=True) - error_code = StringField(required=False) - response = DictField(required=True) # can be an error or a valid content. Not important here. - details = DictField(required=False) # can be a detailed error when we don't want to put it in the response. - stale = BooleanField(required=False, default=False) - updated_at = DateTimeField(default=get_datetime) - worker_version = StringField(required=False) - dataset_git_revision = StringField(required=False) - - meta = { - "collection": "splitsResponse", - "db_alias": "cache", - "indexes": [ - "dataset_name", - ("http_status", "error_code"), - ("dataset_name", "-updated_at"), - ], - } - objects = QuerySetManager["SplitsResponse"]() +# cache of any endpoint +class CachedResponse(Document): + """A response to an endpoint request, cached in the mongoDB database @@ -73,0 +53,13 @@ class SplitsResponse(Document): + Args: + kind (`str`): The kind of the cached response, identifies the endpoint + dataset (`str`): The requested dataset. + config (`str`, optional): The requested config, if any. + split (`str`, optional): The requested split, if any. + http_status (`HTTPStatus`): The HTTP status code. + error_code (`str`, optional): The error code, if any. + content (`dict`): The content of the cached response. Can be an error or a valid content. + details (`dict`, optional): Additional details, eg. a detailed error that we don't want to send as a response. + updated_at (`datetime`): When the cache entry has been last updated. + worker_version (`str`): The semver version of the worker that cached the response. + dataset_git_revision (`str`): The commit (of the git dataset repo) used to generate the response. 
+ """ @@ -75,2 +66,0 @@ class SplitsResponse(Document): -# cache of the /first-rows endpoint -class FirstRowsResponse(Document): @@ -78,3 +68,6 @@ class FirstRowsResponse(Document): - dataset_name = StringField(required=True, unique_with=["config_name", "split_name"]) - config_name = StringField(required=True) - split_name = StringField(required=True) + + kind = StringField(required=True, unique_with=["dataset", "config", "split"]) + dataset = StringField(required=True) + config = StringField() + split = StringField() + @@ -82,4 +75,6 @@ class FirstRowsResponse(Document): - error_code = StringField(required=False) - response = DictField(required=True) # can be an error or a valid content. Not important here. - details = DictField(required=False) # can be a detailed error when we don't want to put it in the response. - stale = BooleanField(required=False, default=False) + error_code = StringField() + content = DictField(required=True) + worker_version = StringField() + dataset_git_revision = StringField() + + details = DictField() @@ -87,2 +81,0 @@ class FirstRowsResponse(Document): - worker_version = StringField(required=False) - dataset_git_revision = StringField(required=False) @@ -91 +84 @@ class FirstRowsResponse(Document): - "collection": "firstRowsResponse", + "collection": "cachedResponsesBlue", @@ -94,6 +87,5 @@ class FirstRowsResponse(Document): - ("dataset_name", "config_name", "split_name"), - ("dataset_name", "http_status"), - ("http_status", "dataset_name"), - # ^ this index (reversed) is used for the "distinct" command to get the names of the valid datasets - ("http_status", "error_code"), - ("dataset_name", "-updated_at"), + ("kind", "dataset", "config", "split"), + ("dataset", "kind", "http_status"), + ("kind", "http_status", "dataset"), + ("kind", "http_status", "error_code"), + ("kind", "id"), @@ -102 +94 @@ class FirstRowsResponse(Document): - objects = QuerySetManager["FirstRowsResponse"]() + objects = QuerySetManager["CachedResponse"]() @@ -105 +97,6 @@ class FirstRowsResponse(Document): -AnyResponse = TypeVar("AnyResponse", SplitsResponse, FirstRowsResponse) +# Fix issue with mongoengine: https://github.com/MongoEngine/mongoengine/issues/1242#issuecomment-810501601 +# mongoengine automatically sets "config" and "splits" as required fields, because they are listed in the unique_with +# field of the "kind" field. But it's an error, since unique indexes (which are used to enforce unique_with) accept +# null values, see https://www.mongodb.com/docs/v5.0/core/index-unique/#unique-index-and-missing-field. 
+CachedResponse.config.required = False # type: ignore +CachedResponse.split.required = False # type: ignore @@ -108 +104,0 @@ AnyResponse = TypeVar("AnyResponse", SplitsResponse, FirstRowsResponse) -# /splits endpoint @@ -110,3 +106,4 @@ AnyResponse = TypeVar("AnyResponse", SplitsResponse, FirstRowsResponse) -def upsert_splits_response( - dataset_name: str, - response: Dict, +def upsert_response( + kind: str, + dataset: str, + content: Dict, @@ -113,0 +111,2 @@ def upsert_splits_response( + config: Optional[str] = None, + split: Optional[str] = None, @@ -119 +118,2 @@ def upsert_splits_response( - SplitsResponse.objects(dataset_name=dataset_name).upsert_one( + CachedResponse.objects(kind=kind, dataset=dataset, config=config, split=split).upsert_one( + content=content, @@ -122,2 +121,0 @@ def upsert_splits_response( - response=response, - stale=False, @@ -125 +122,0 @@ def upsert_splits_response( - updated_at=get_datetime(), @@ -127,0 +125 @@ def upsert_splits_response( + updated_at=get_datetime(), @@ -131,2 +129,4 @@ def upsert_splits_response( -def delete_splits_responses(dataset_name: str): - SplitsResponse.objects(dataset_name=dataset_name).delete() +def delete_response( + kind: str, dataset: str, config: Optional[str] = None, split: Optional[str] = None +) -> Optional[int]: + return CachedResponse.objects(kind=kind, dataset=dataset, config=config, split=split).delete() @@ -135,2 +135,2 @@ def delete_splits_responses(dataset_name: str): -def mark_splits_responses_as_stale(dataset_name: str): - SplitsResponse.objects(dataset_name=dataset_name).update(stale=True, updated_at=get_datetime()) +def delete_dataset_responses(dataset: str) -> Optional[int]: + return CachedResponse.objects(dataset=dataset).delete() @@ -139,2 +139 @@ def mark_splits_responses_as_stale(dataset_name: str): -class SplitsCacheEntry(TypedDict): - response: Dict +class CacheEntryWithoutContent(TypedDict): @@ -148,2 +147,8 @@ class SplitsCacheEntry(TypedDict): -def get_splits_response(dataset_name: str) -> SplitsCacheEntry: - split_response = SplitsResponse.objects(dataset_name=dataset_name).get() +def get_response_without_content( + kind: str, dataset: str, config: Optional[str] = None, split: Optional[str] = None +) -> CacheEntryWithoutContent: + response = ( + CachedResponse.objects(kind=kind, dataset=dataset, config=config, split=split) + .only("http_status", "error_code", "worker_version", "dataset_git_revision") + .get() + ) @@ -151,5 +156,4 @@ def get_splits_response(dataset_name: str) -> SplitsCacheEntry: - "response": split_response.response, - "http_status": split_response.http_status, - "error_code": split_response.error_code, - "worker_version": split_response.worker_version, - "dataset_git_revision": split_response.dataset_git_revision, + "http_status": response.http_status, + "error_code": response.error_code, + "worker_version": response.worker_version, + "dataset_git_revision": response.dataset_git_revision, @@ -159,54 +163,2 @@ def get_splits_response(dataset_name: str) -> SplitsCacheEntry: -# /first-rows endpoint -# Note: we let the exceptions throw (ie DocumentTooLarge): it's the responsibility of the caller to manage them -def upsert_first_rows_response( - dataset_name: str, - config_name: str, - split_name: str, - response: Dict, - http_status: HTTPStatus, - error_code: Optional[str] = None, - details: Optional[Dict] = None, - worker_version: Optional[str] = None, - dataset_git_revision: Optional[str] = None, -) -> None: - FirstRowsResponse.objects(dataset_name=dataset_name, config_name=config_name, 
split_name=split_name).upsert_one( - http_status=http_status, - error_code=error_code, - response=response, - stale=False, - details=details, - updated_at=get_datetime(), - worker_version=worker_version, - dataset_git_revision=dataset_git_revision, - ) - - -def delete_first_rows_responses( - dataset_name: str, config_name: Optional[str] = None, split_name: Optional[str] = None -): - objects = ( - FirstRowsResponse.objects(dataset_name=dataset_name) - if config_name is None - else FirstRowsResponse.objects(dataset_name=dataset_name, config_name=config_name, split_name=split_name) - ) - objects.delete() - - -def mark_first_rows_responses_as_stale( - dataset_name: str, config_name: Optional[str] = None, split_name: Optional[str] = None -): - objects = ( - FirstRowsResponse.objects(dataset_name=dataset_name) - if config_name is None - else FirstRowsResponse.objects(dataset_name=dataset_name, config_name=config_name, split_name=split_name) - ) - objects.update(stale=True, updated_at=get_datetime()) - - -# Note: it's the same definition as SplitsCacheEntry -class FirstRowsCacheEntry(TypedDict): - response: Dict - http_status: HTTPStatus - error_code: Optional[str] - worker_version: Optional[str] - dataset_git_revision: Optional[str] +class CacheEntry(CacheEntryWithoutContent): + content: Dict @@ -216,4 +168,6 @@ class FirstRowsCacheEntry(TypedDict): -def get_first_rows_response(dataset_name: str, config_name: str, split_name: str) -> FirstRowsCacheEntry: - first_rows_response = FirstRowsResponse.objects( - dataset_name=dataset_name, config_name=config_name, split_name=split_name - ).get() +def get_response(kind: str, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> CacheEntry: + response = ( + CachedResponse.objects(kind=kind, dataset=dataset, config=config, split=split) + .only("content", "http_status", "error_code", "worker_version", "dataset_git_revision") + .get() + ) @@ -221,5 +175,5 @@ def get_first_rows_response(dataset_name: str, config_name: str, split_name: str - "response": first_rows_response.response, - "http_status": first_rows_response.http_status, - "error_code": first_rows_response.error_code, - "worker_version": first_rows_response.worker_version, - "dataset_git_revision": first_rows_response.dataset_git_revision, + "content": response.content, + "http_status": response.http_status, + "error_code": response.error_code, + "worker_version": response.worker_version, + "dataset_git_revision": response.dataset_git_revision, @@ -229,28 +183,5 @@ def get_first_rows_response(dataset_name: str, config_name: str, split_name: str -def get_dataset_first_rows_response_splits(dataset_name: str) -> List[Tuple[str, str, str]]: - return [ - (firstRowResponse.dataset_name, firstRowResponse.config_name, firstRowResponse.split_name) - for firstRowResponse in FirstRowsResponse.objects(dataset_name=dataset_name).only( - "dataset_name", "config_name", "split_name" - ) - ] - - -# /valid endpoint - - -def get_valid_dataset_names() -> List[str]: - # a dataset is considered valid if: - # - the /splits response is valid - candidate_dataset_names = set(SplitsResponse.objects(http_status=HTTPStatus.OK).distinct("dataset_name")) - # - at least one of the /first-rows responses is valid - candidate_dataset_names_in_first_rows = set( - FirstRowsResponse.objects(http_status=HTTPStatus.OK).distinct("dataset_name") - ) - - candidate_dataset_names.intersection_update(candidate_dataset_names_in_first_rows) - # note that the list is sorted alphabetically for consistency - return 
sorted(candidate_dataset_names) - - -# /is-valid endpoint - +class ResponseId(TypedDict): + kind: str + dataset: str + config: Optional[str] + split: Optional[str] @@ -258,9 +188,0 @@ def get_valid_dataset_names() -> List[str]: -def is_dataset_name_valid(dataset_name: str) -> bool: - # a dataset is considered valid if: - # - the /splits response is valid - # - at least one of the /first-rows responses is valid - valid_split_responses = SplitsResponse.objects(dataset_name=dataset_name, http_status=HTTPStatus.OK).count() - valid_first_rows_responses = FirstRowsResponse.objects( - dataset_name=dataset_name, http_status=HTTPStatus.OK - ).count() - return (valid_split_responses == 1) and (valid_first_rows_responses > 0) @@ -267,0 +190,10 @@ def is_dataset_name_valid(dataset_name: str) -> bool: +def get_dataset_response_ids(dataset: str) -> List[ResponseId]: + return [ + { + "kind": response.kind, + "dataset": response.dataset, + "config": response.config, + "split": response.split, + } + for response in CachedResponse.objects(dataset=dataset).only("kind", "dataset", "config", "split") + ] @@ -269 +200,0 @@ def is_dataset_name_valid(dataset_name: str) -> bool: -# admin /metrics endpoint @@ -271 +202,2 @@ def is_dataset_name_valid(dataset_name: str) -> bool: -CountByHttpStatusAndErrorCode = Dict[str, Dict[Optional[str], int]] +def get_valid_datasets(kind: str) -> Set[str]: + return set(CachedResponse.objects(kind=kind, http_status=HTTPStatus.OK).distinct("dataset")) @@ -274 +206,3 @@ CountByHttpStatusAndErrorCode = Dict[str, Dict[Optional[str], int]] -def get_entries_count_by_status_and_error_code(entries: QuerySet[AnyResponse]) -> CountByHttpStatusAndErrorCode: +def get_validity_by_kind(dataset: str) -> Dict[str, bool]: + # TODO: rework with aggregate + entries = CachedResponse.objects(dataset=dataset).only("kind", "http_status") @@ -276,5 +210,2 @@ def get_entries_count_by_status_and_error_code(entries: QuerySet[AnyResponse]) - - str(http_status): { - error_code: entries(http_status=http_status, error_code=error_code).count() - for error_code in entries(http_status=http_status).distinct("error_code") - } - for http_status in sorted(entries.distinct("http_status")) + str(kind): entries(kind=kind, http_status=HTTPStatus.OK).first() is not None + for kind in sorted(entries.distinct("kind")) @@ -284,7 +215 @@ def get_entries_count_by_status_and_error_code(entries: QuerySet[AnyResponse]) - -def get_splits_responses_count_by_status_and_error_code() -> CountByHttpStatusAndErrorCode: - return get_entries_count_by_status_and_error_code(SplitsResponse.objects) - - -def get_first_rows_responses_count_by_status_and_error_code() -> CountByHttpStatusAndErrorCode: - return get_entries_count_by_status_and_error_code(FirstRowsResponse.objects) - +# admin /metrics endpoint @@ -292 +216,0 @@ def get_first_rows_responses_count_by_status_and_error_code() -> CountByHttpStat -# for scripts @@ -293,0 +218,5 @@ def get_first_rows_responses_count_by_status_and_error_code() -> CountByHttpStat +class CountEntry(TypedDict): + kind: str + http_status: int + error_code: Optional[str] + count: int @@ -295,7 +223,0 @@ def get_first_rows_responses_count_by_status_and_error_code() -> CountByHttpStat -def get_datasets_with_some_error() -> List[str]: - # - the /splits response is invalid - candidate_dataset_names = set(SplitsResponse.objects(http_status__ne=HTTPStatus.OK).distinct("dataset_name")) - # - or one of the /first-rows responses is invalid - candidate_dataset_names_in_first_rows = set( - 
FirstRowsResponse.objects(http_status__ne=HTTPStatus.OK).distinct("dataset_name") - ) @@ -303,2 +225,17 @@ def get_datasets_with_some_error() -> List[str]: - # note that the list is sorted alphabetically for consistency - return sorted(candidate_dataset_names.union(candidate_dataset_names_in_first_rows)) +def get_responses_count_by_kind_status_and_error_code() -> List[CountEntry]: + # TODO: rework with aggregate + # see + # https://stackoverflow.com/questions/47301829/mongodb-distinct-count-for-combination-of-two-fields?noredirect=1&lq=1#comment81555081_47301829 + # and https://docs.mongoengine.org/guide/querying.html#mongodb-aggregation-api + entries = CachedResponse.objects().only("kind", "http_status", "error_code") + return [ + { + "kind": str(kind), + "http_status": int(http_status), + "error_code": error_code, + "count": entries(kind=kind, http_status=http_status, error_code=error_code).count(), + } + for kind in sorted(entries.distinct("kind")) + for http_status in sorted(entries(kind=kind).distinct("http_status")) + for error_code in entries(kind=kind, http_status=http_status).distinct("error_code") + ] @@ -310 +247,2 @@ def get_datasets_with_some_error() -> List[str]: -class SplitsResponseReport(TypedDict): +class ResponseReport(TypedDict): + kind: str @@ -311,0 +250,2 @@ class SplitsResponseReport(TypedDict): + config: Optional[str] + split: Optional[str] @@ -318,12 +258,2 @@ class SplitsResponseReport(TypedDict): -class FirstRowsResponseReport(SplitsResponseReport): - config: str - split: str - - -class CacheReportSplits(TypedDict): - cache_reports: List[SplitsResponseReport] - next_cursor: str - - -class CacheReportFirstRows(TypedDict): - cache_reports: List[FirstRowsResponseReport] +class CacheReport(TypedDict): + cache_reports: List[ResponseReport] @@ -341 +271 @@ class InvalidLimit(Exception): -def get_cache_reports_splits(cursor: str, limit: int) -> CacheReportSplits: +def get_cache_reports(kind: str, cursor: Optional[str], limit: int) -> CacheReport: @@ -343 +273 @@ def get_cache_reports_splits(cursor: str, limit: int) -> CacheReportSplits: - Get a list of reports about SplitsResponse cache entries, along with the next cursor. + Get a list of reports of the cache entries, along with the next cursor. @@ -345,46 +274,0 @@ def get_cache_reports_splits(cursor: str, limit: int) -> CacheReportSplits: - Args: - cursor (`str`): - An opaque string value representing a pointer to a specific SplitsResponse item in the dataset. The - server returns results after the given pointer. - An empty string means to start from the beginning. - limit (strictly positive `int`): - The maximum number of results. - Returns: - [`CacheReportSplits`]: A dict with the list of reports and the next cursor. The next cursor is - an empty string if there are no more items to be fetched. - <Tip> - Raises the following errors: - - [`~libcache.simple_cache.InvalidCursor`] - If the cursor is invalid. - - [`~libcache.simple_cache.InvalidLimit`] - If the limit is an invalid number. 
- </Tip> - """ - if not cursor: - queryset = SplitsResponse.objects() - else: - try: - queryset = SplitsResponse.objects(id__gt=ObjectId(cursor)) - except InvalidId as err: - raise InvalidCursor("Invalid cursor.") from err - if limit <= 0: - raise InvalidLimit("Invalid limit.") - objects = list( - queryset.order_by("+id") - .only("id", "dataset_name", "http_status", "error_code", "worker_version", "dataset_git_revision") - .limit(limit) - ) - - return { - "cache_reports": [ - { - "dataset": object.dataset_name, - "http_status": object.http_status.value, - "error_code": object.error_code, - "worker_version": object.worker_version, - "dataset_git_revision": object.dataset_git_revision, - } - for object in objects - ], - "next_cursor": "" if len(objects) < limit else str(objects[-1].id), - } @@ -391,0 +276 @@ def get_cache_reports_splits(cursor: str, limit: int) -> CacheReportSplits: + The "reports" are the cached entries, without the "content", "details" and "updated_at" fields. @@ -393,4 +277,0 @@ def get_cache_reports_splits(cursor: str, limit: int) -> CacheReportSplits: -def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheReportFirstRows: - """ - Get a list of reports about FirstRowsResponse cache entries, along with the next cursor. - See https://solovyov.net/blog/2020/api-pagination-design/. @@ -397,0 +279 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo + kind (str): the kind of the cache entries @@ -405 +287 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo - [`CacheReportFirstRows`]: A dict with the list of reports and the next cursor. The next cursor is + [`CacheReport`]: A dict with the list of reports and the next cursor. The next cursor is @@ -416 +298 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo - queryset = FirstRowsResponse.objects() + queryset = CachedResponse.objects(kind=kind) @@ -419 +301 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo - queryset = FirstRowsResponse.objects(id__gt=ObjectId(cursor)) + queryset = CachedResponse.objects(kind=kind, id__gt=ObjectId(cursor)) @@ -428,3 +310,4 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo - "dataset_name", - "config_name", - "split_name", + "kind", + "dataset", + "config", + "split", @@ -441,3 +324,4 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo - "dataset": object.dataset_name, - "config": object.config_name, - "split": object.split_name, + "kind": kind, + "dataset": object.dataset, + "config": object.config, + "split": object.split, @@ -455,63 +338,0 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo -class FeaturesResponseReport(TypedDict): - dataset: str - config: str - split: str - features: Optional[List[Any]] - - -class CacheReportFeatures(TypedDict): - cache_reports: List[FeaturesResponseReport] - next_cursor: str - - -def get_cache_reports_features(cursor: Optional[str], limit: int) -> CacheReportFeatures: - """ - Get a list of reports on the features (columns), grouped by splits, along with the next cursor. - See https://solovyov.net/blog/2020/api-pagination-design/. - Args: - cursor (`str`): - An opaque string value representing a pointer to a specific FirstRowsResponse item in the dataset. The - server returns results after the given pointer. - An empty string means to start from the beginning. 
- limit (strictly positive `int`): - The maximum number of results. - Returns: - [`CacheReportFeatures`]: A dict with the list of reports and the next cursor. The next cursor is - an empty string if there are no more items to be fetched. - <Tip> - Raises the following errors: - - [`~libcache.simple_cache.InvalidCursor`] - If the cursor is invalid. - - [`~libcache.simple_cache.InvalidLimit`] - If the limit is an invalid number. - </Tip> - """ - if not cursor: - queryset = FirstRowsResponse.objects() - else: - try: - queryset = FirstRowsResponse.objects(id__gt=ObjectId(cursor)) - except InvalidId as err: - raise InvalidCursor("Invalid cursor.") from err - if limit <= 0: - raise InvalidLimit("Invalid limit.") - objects = list( - queryset(response__features__exists=True) - .order_by("+id") - .only("id", "dataset_name", "config_name", "split_name", "response.features") - .limit(limit) - ) - return { - "cache_reports": [ - { - "dataset": object.dataset_name, - "config": object.config_name, - "split": object.split_name, - "features": object.response["features"], - } - for object in objects - ], - "next_cursor": "" if len(objects) < limit else str(objects[-1].id), - } - - @@ -519,3 +340,2 @@ def get_cache_reports_features(cursor: Optional[str], limit: int) -> CacheReport -def _clean_database() -> None: - SplitsResponse.drop_collection() # type: ignore - FirstRowsResponse.drop_collection() # type: ignore +def _clean_cache_database() -> None: + CachedResponse.drop_collection() # type: ignore diff --git a/libs/libcache/tests/test_simple_cache.py b/libs/libcache/tests/test_simple_cache.py index 20aa09cd..1e8751de 100644 --- a/libs/libcache/tests/test_simple_cache.py +++ b/libs/libcache/tests/test_simple_cache.py @@ -5,0 +6 @@ from time import process_time +from typing import Optional @@ -10,0 +12 @@ from libcache.simple_cache import ( + CachedResponse, @@ -14,17 +16,11 @@ from libcache.simple_cache import ( - _clean_database, - delete_first_rows_responses, - delete_splits_responses, - get_cache_reports_features, - get_cache_reports_first_rows, - get_cache_reports_splits, - get_datasets_with_some_error, - get_first_rows_response, - get_first_rows_responses_count_by_status_and_error_code, - get_splits_response, - get_splits_responses_count_by_status_and_error_code, - get_valid_dataset_names, - is_dataset_name_valid, - mark_first_rows_responses_as_stale, - mark_splits_responses_as_stale, - upsert_first_rows_response, - upsert_splits_response, + _clean_cache_database, + delete_dataset_responses, + delete_response, + get_cache_reports, + get_dataset_response_ids, + get_response, + get_response_without_content, + get_responses_count_by_kind_status_and_error_code, + get_valid_datasets, + get_validity_by_kind, + upsert_response, @@ -36,13 +32,83 @@ def clean_mongo_database() -> None: - _clean_database() - - -def test_upsert_splits_response() -> None: - dataset_name = "test_dataset" - response = {"splits": [{"dataset_name": dataset_name, "config_name": "test_config", "split_name": "test_split"}]} - upsert_splits_response(dataset_name, response, HTTPStatus.OK) - cache_entry = get_splits_response(dataset_name) - assert cache_entry["http_status"] == HTTPStatus.OK - assert cache_entry["response"] == response - assert cache_entry["error_code"] is None - assert cache_entry["worker_version"] is None - assert cache_entry["dataset_git_revision"] is None + _clean_cache_database() + + +def test_insert_null_values() -> None: + kind = "test_kind" + dataset_a = "test_dataset_a" + dataset_b = "test_dataset_b" + dataset_c = 
"test_dataset_c" + config = None + split = None + content = {"some": "content"} + http_status = HTTPStatus.OK + + CachedResponse.objects(kind=kind, dataset=dataset_a, config=config, split=split).upsert_one( + content=content, + http_status=http_status, + ) + assert CachedResponse.objects.count() == 1 + cached_response = CachedResponse.objects.get() + assert cached_response is not None + assert cached_response.config is None + assert "config" not in cached_response.to_json() + cached_response.validate() + + CachedResponse( + kind=kind, dataset=dataset_b, config=config, split=split, content=content, http_status=http_status + ).save() + assert CachedResponse.objects.count() == 2 + cached_response = CachedResponse.objects(dataset=dataset_b).get() + assert cached_response is not None + assert cached_response.config is None + assert "config" not in cached_response.to_json() + + coll = CachedResponse._get_collection() + coll.insert_one( + { + "kind": kind, + "dataset": dataset_c, + "config": None, + "split": None, + "content": content, + "http_status": http_status, + } + ) + assert CachedResponse.objects.count() == 3 + cached_response = CachedResponse.objects(dataset=dataset_c).get() + assert cached_response is not None + assert cached_response.config is None + assert "config" not in cached_response.to_json() + + [email protected]( + "config,split", + [ + (None, None), + ("test_config", None), + ("test_config", "test_split"), + ], +) +def test_upsert_response(config: Optional[str], split: Optional[str]) -> None: + kind = "test_kind" + dataset = "test_dataset" + config = None + split = None + content = {"some": "content"} + upsert_response(kind=kind, dataset=dataset, config=config, split=split, content=content, http_status=HTTPStatus.OK) + cached_response = get_response(kind=kind, dataset=dataset, config=config, split=split) + assert cached_response == { + "http_status": HTTPStatus.OK, + "content": content, + "error_code": None, + "worker_version": None, + "dataset_git_revision": None, + } + cached_response_without_content = get_response_without_content( + kind=kind, dataset=dataset, config=config, split=split + ) + assert cached_response_without_content == { + "http_status": HTTPStatus.OK, + "error_code": None, + "worker_version": None, + "dataset_git_revision": None, + } @@ -51,3 +117,3 @@ def test_upsert_splits_response() -> None: - upsert_splits_response(dataset_name, response, HTTPStatus.OK) - cache_entry2 = get_splits_response(dataset_name) - assert cache_entry2 == cache_entry + upsert_response(kind=kind, dataset=dataset, config=config, split=split, content=content, http_status=HTTPStatus.OK) + cached_response2 = get_response(kind=kind, dataset=dataset, config=config, split=split) + assert cached_response2 == cached_response @@ -55,7 +121,5 @@ def test_upsert_splits_response() -> None: - mark_splits_responses_as_stale(dataset_name) - # we don't have access to the stale field - # we also don't have access to the updated_at field - - delete_splits_responses(dataset_name) - with pytest.raises(DoesNotExist): - get_splits_response(dataset_name) + another_config = "another_config" + upsert_response( + kind=kind, dataset=dataset, config=another_config, split=split, content=content, http_status=HTTPStatus.OK + ) + get_response(kind=kind, dataset=dataset, config=config, split=split) @@ -63 +127 @@ def test_upsert_splits_response() -> None: - mark_splits_responses_as_stale(dataset_name) + delete_dataset_responses(dataset=dataset) @@ -65 +129 @@ def test_upsert_splits_response() -> None: - 
get_splits_response(dataset_name) + get_response(kind=kind, dataset=dataset, config=config, split=split) @@ -70,4 +134,7 @@ def test_upsert_splits_response() -> None: - upsert_splits_response( - dataset_name, - response, - HTTPStatus.BAD_REQUEST, + upsert_response( + kind=kind, + dataset=dataset, + config=config, + split=split, + content=content, + http_status=HTTPStatus.BAD_REQUEST, @@ -78,25 +144,0 @@ def test_upsert_splits_response() -> None: - cache_entry3 = get_splits_response(dataset_name) - assert cache_entry3["http_status"] == HTTPStatus.BAD_REQUEST - assert cache_entry3["response"] == response - assert cache_entry3["error_code"] == error_code - assert cache_entry3["worker_version"] == worker_version - assert cache_entry3["dataset_git_revision"] == dataset_git_revision - - -def test_upsert_first_rows_response() -> None: - dataset_name = "test_dataset" - config_name = "test_config" - split_name = "test_split" - response = {"key": "value"} - upsert_first_rows_response(dataset_name, config_name, split_name, response, HTTPStatus.OK) - cache_entry = get_first_rows_response(dataset_name, config_name, split_name) - assert cache_entry["http_status"] == HTTPStatus.OK - assert cache_entry["response"] == response - assert cache_entry["error_code"] is None - assert cache_entry["worker_version"] is None - assert cache_entry["dataset_git_revision"] is None - - # ensure it's idempotent - upsert_first_rows_response(dataset_name, config_name, split_name, response, HTTPStatus.OK) - cache_entry2 = get_first_rows_response(dataset_name, config_name, split_name) - assert cache_entry2 == cache_entry @@ -104,4 +146,8 @@ def test_upsert_first_rows_response() -> None: - mark_first_rows_responses_as_stale(dataset_name) - mark_first_rows_responses_as_stale(dataset_name, config_name, split_name) - # we don't have access to the stale field - # we also don't have access to the updated_at field + cached_response3 = get_response(kind=kind, dataset=dataset, config=config, split=split) + assert cached_response3 == { + "http_status": HTTPStatus.BAD_REQUEST, + "content": content, + "error_code": error_code, + "worker_version": worker_version, + "dataset_git_revision": dataset_git_revision, + } @@ -109,3 +154,0 @@ def test_upsert_first_rows_response() -> None: - upsert_first_rows_response(dataset_name, config_name, "test_split2", response, HTTPStatus.OK) - delete_first_rows_responses(dataset_name, config_name, "test_split2") - get_first_rows_response(dataset_name, config_name, split_name) @@ -113 +156,11 @@ def test_upsert_first_rows_response() -> None: - delete_first_rows_responses(dataset_name) +def test_delete_response() -> None: + kind = "test_kind" + dataset_a = "test_dataset_a" + dataset_b = "test_dataset_b" + config = None + split = "test_split" + upsert_response(kind=kind, dataset=dataset_a, config=config, split=split, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=kind, dataset=dataset_b, config=config, split=split, content={}, http_status=HTTPStatus.OK) + get_response(kind=kind, dataset=dataset_a, config=config, split=split) + get_response(kind=kind, dataset=dataset_b, config=config, split=split) + delete_response(kind=kind, dataset=dataset_a, config=config, split=split) @@ -115,4 +168,18 @@ def test_upsert_first_rows_response() -> None: - get_first_rows_response(dataset_name, config_name, split_name) - - mark_first_rows_responses_as_stale(dataset_name) - mark_first_rows_responses_as_stale(dataset_name, config_name, split_name) + get_response(kind=kind, dataset=dataset_a, config=config, 
split=split) + get_response(kind=kind, dataset=dataset_b, config=config, split=split) + + +def test_delete_dataset_responses() -> None: + kind_a = "test_kind_a" + kind_b = "test_kind_b" + dataset_a = "test_dataset_a" + dataset_b = "test_dataset_b" + config = "test_config" + split = "test_split" + upsert_response(kind=kind_a, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=kind_b, dataset=dataset_a, config=config, split=split, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=kind_a, dataset=dataset_b, content={}, http_status=HTTPStatus.OK) + get_response(kind=kind_a, dataset=dataset_a) + get_response(kind=kind_b, dataset=dataset_a, config=config, split=split) + get_response(kind=kind_a, dataset=dataset_b) + delete_dataset_responses(dataset=dataset_a) @@ -120,21 +187,4 @@ def test_upsert_first_rows_response() -> None: - get_first_rows_response(dataset_name, config_name, split_name) - - error_code = "error_code" - worker_version = "0.1.2" - dataset_git_revision = "123456" - upsert_first_rows_response( - dataset_name, - config_name, - split_name, - response, - HTTPStatus.BAD_REQUEST, - error_code=error_code, - worker_version=worker_version, - dataset_git_revision=dataset_git_revision, - ) - cache_entry3 = get_first_rows_response(dataset_name, config_name, split_name) - assert cache_entry3["http_status"] == HTTPStatus.BAD_REQUEST - assert cache_entry3["response"] == response - assert cache_entry3["error_code"] == error_code - assert cache_entry3["worker_version"] == worker_version - assert cache_entry3["dataset_git_revision"] == dataset_git_revision + get_response(kind=kind_a, dataset=dataset_a) + with pytest.raises(DoesNotExist): + get_response(kind=kind_b, dataset=dataset_a, config=config, split=split) + get_response(kind=kind_a, dataset=dataset_b) @@ -145,4 +195,5 @@ def test_big_row() -> None: - dataset_name = "test_dataset" - config_name = "test_config" - split_name = "test_split" - big_response = {"content": "a" * 100_000_000} + kind = "test_kind" + dataset = "test_dataset" + config = "test_config" + split = "test_split" + big_content = {"big": "a" * 100_000_000} @@ -150,2 +201,3 @@ def test_big_row() -> None: - upsert_first_rows_response(dataset_name, config_name, split_name, big_response, HTTPStatus.OK) - + upsert_response( + kind=kind, dataset=dataset, config=config, split=split, content=big_content, http_status=HTTPStatus.OK + ) @@ -153,3 +204,0 @@ def test_big_row() -> None: -def test_valid() -> None: - assert get_valid_dataset_names() == [] - assert get_datasets_with_some_error() == [] @@ -157,4 +206,13 @@ def test_valid() -> None: - upsert_splits_response( - "test_dataset", - {"key": "value"}, - HTTPStatus.OK, +def test_get_dataset_response_ids() -> None: + kind_a = "test_kind_a" + kind_b = "test_kind_b" + dataset_a = "test_dataset_a" + dataset_b = "test_dataset_b" + dataset_c = "test_dataset_c" + config_a = "test_config_a" + config_b = "test_config_b" + split_a = "test_split_a" + split_b = None + upsert_response(kind=kind_a, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) + upsert_response( + kind=kind_b, dataset=dataset_a, config=config_a, split=split_a, content={}, http_status=HTTPStatus.OK @@ -161,0 +220,20 @@ def test_valid() -> None: + upsert_response( + kind=kind_b, dataset=dataset_a, config=config_b, split=split_a, content={}, http_status=HTTPStatus.OK + ) + upsert_response( + kind=kind_b, dataset=dataset_a, config=config_b, split=split_b, content={}, http_status=HTTPStatus.OK + ) + upsert_response(kind=kind_a, 
dataset=dataset_b, content={}, http_status=HTTPStatus.OK) + result = get_dataset_response_ids(dataset=dataset_a) + expected = [ + {"kind": kind_a, "dataset": dataset_a, "config": None, "split": None}, + {"kind": kind_b, "dataset": dataset_a, "config": config_a, "split": split_a}, + {"kind": kind_b, "dataset": dataset_a, "config": config_b, "split": split_b}, + {"kind": kind_b, "dataset": dataset_a, "config": config_b, "split": split_a}, + ] + assert len(result) == len(expected) and all(x in expected for x in result) + # ^ compare the contents of the lists without caring about the order + assert get_dataset_response_ids(dataset=dataset_b) == [ + {"kind": kind_a, "dataset": dataset_b, "config": None, "split": None} + ] + assert get_dataset_response_ids(dataset=dataset_c) == [] @@ -163,5 +240,0 @@ def test_valid() -> None: - assert get_valid_dataset_names() == [] - assert get_datasets_with_some_error() == [] - assert is_dataset_name_valid("test_dataset") is False - assert is_dataset_name_valid("test_dataset2") is False - assert is_dataset_name_valid("test_dataset3") is False @@ -169,9 +242,2 @@ def test_valid() -> None: - upsert_first_rows_response( - "test_dataset", - "test_config", - "test_split", - { - "key": "value", - }, - HTTPStatus.OK, - ) +def test_get_valid_dataset_names_empty() -> None: + assert not get_valid_datasets(kind="test_kind") @@ -179,5 +244,0 @@ def test_valid() -> None: - assert get_valid_dataset_names() == ["test_dataset"] - assert get_datasets_with_some_error() == [] - assert is_dataset_name_valid("test_dataset") is True - assert is_dataset_name_valid("test_dataset2") is False - assert is_dataset_name_valid("test_dataset3") is False @@ -185,5 +246,7 @@ def test_valid() -> None: - upsert_splits_response( - "test_dataset2", - {"key": "value"}, - HTTPStatus.OK, - ) +def test_get_valid_dataset_names_two_valid_datasets() -> None: + kind = "test_kind" + dataset_a = "test_dataset_a" + dataset_b = "test_dataset_b" + upsert_response(kind=kind, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=kind, dataset=dataset_b, content={}, http_status=HTTPStatus.OK) + assert get_valid_datasets(kind=kind) == {dataset_a, dataset_b} @@ -191,5 +253,0 @@ def test_valid() -> None: - assert get_valid_dataset_names() == ["test_dataset"] - assert get_datasets_with_some_error() == [] - assert is_dataset_name_valid("test_dataset") is True - assert is_dataset_name_valid("test_dataset2") is False - assert is_dataset_name_valid("test_dataset3") is False @@ -197,9 +255,9 @@ def test_valid() -> None: - upsert_first_rows_response( - "test_dataset2", - "test_config2", - "test_split2", - { - "key": "value", - }, - HTTPStatus.BAD_REQUEST, - ) +def test_get_valid_dataset_names_filtered_by_kind() -> None: + kind_a = "test_kind_a" + kind_b = "test_kind_b" + dataset_a = "test_dataset_a" + dataset_b = "test_dataset_b" + upsert_response(kind=kind_a, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=kind_b, dataset=dataset_b, content={}, http_status=HTTPStatus.OK) + assert get_valid_datasets(kind=kind_a) == {dataset_a} + assert get_valid_datasets(kind=kind_b) == {dataset_b} @@ -207,5 +264,0 @@ def test_valid() -> None: - assert get_valid_dataset_names() == ["test_dataset"] - assert get_datasets_with_some_error() == ["test_dataset2"] - assert is_dataset_name_valid("test_dataset") is True - assert is_dataset_name_valid("test_dataset2") is False - assert is_dataset_name_valid("test_dataset3") is False @@ -213,8 +266,8 @@ def test_valid() -> None: - 
upsert_first_rows_response( - "test_dataset2", - "test_config2", - "test_split3", - { - "key": "value", - }, - HTTPStatus.OK, +def test_get_valid_dataset_names_at_least_one_valid_response() -> None: + kind = "test_kind" + dataset = "test_dataset" + config_a = "test_config_a" + config_b = "test_config_b" + upsert_response(kind=kind, dataset=dataset, config=config_a, content={}, http_status=HTTPStatus.OK) + upsert_response( + kind=kind, dataset=dataset, config=config_b, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR @@ -221,0 +275 @@ def test_valid() -> None: + assert get_valid_datasets(kind=kind) == {dataset} @@ -223,5 +276,0 @@ def test_valid() -> None: - assert get_valid_dataset_names() == ["test_dataset", "test_dataset2"] - assert get_datasets_with_some_error() == ["test_dataset2"] - assert is_dataset_name_valid("test_dataset") is True - assert is_dataset_name_valid("test_dataset2") is True - assert is_dataset_name_valid("test_dataset3") is False @@ -229,4 +278,10 @@ def test_valid() -> None: - upsert_splits_response( - "test_dataset3", - {"key": "value"}, - HTTPStatus.BAD_REQUEST, +def test_get_valid_dataset_names_only_invalid_responses() -> None: + kind = "test_kind" + dataset = "test_dataset" + config_a = "test_config_a" + config_b = "test_config_b" + upsert_response( + kind=kind, dataset=dataset, config=config_a, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR + ) + upsert_response( + kind=kind, dataset=dataset, config=config_b, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR @@ -233,0 +289 @@ def test_valid() -> None: + assert not get_valid_datasets(kind=kind) @@ -235,5 +290,0 @@ def test_valid() -> None: - assert get_valid_dataset_names() == ["test_dataset", "test_dataset2"] - assert get_datasets_with_some_error() == ["test_dataset2", "test_dataset3"] - assert is_dataset_name_valid("test_dataset") is True - assert is_dataset_name_valid("test_dataset2") is True - assert is_dataset_name_valid("test_dataset3") is False @@ -240,0 +292,2 @@ def test_valid() -> None: +def test_get_validity_by_kind_empty() -> None: + assert not get_validity_by_kind(dataset="dataset") @@ -242,2 +294,0 @@ def test_valid() -> None: -def test_count_by_status_and_error_code() -> None: - assert "OK" not in get_splits_responses_count_by_status_and_error_code() @@ -245,5 +296,8 @@ def test_count_by_status_and_error_code() -> None: - upsert_splits_response( - "test_dataset", - {"key": "value"}, - HTTPStatus.OK, - ) +def test_get_validity_by_kind_two_valid_datasets() -> None: + kind = "test_kind" + dataset_a = "test_dataset_a" + dataset_b = "test_dataset_b" + upsert_response(kind=kind, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=kind, dataset=dataset_b, content={}, http_status=HTTPStatus.OK) + assert get_validity_by_kind(dataset=dataset_a) == {kind: True} + assert get_validity_by_kind(dataset=dataset_b) == {kind: True} @@ -251,2 +304,0 @@ def test_count_by_status_and_error_code() -> None: - assert get_splits_responses_count_by_status_and_error_code() == {"200": {None: 1}} - assert get_first_rows_responses_count_by_status_and_error_code() == {} @@ -254,8 +306,17 @@ def test_count_by_status_and_error_code() -> None: - upsert_first_rows_response( - "test_dataset", - "test_config", - "test_split", - { - "key": "value", - }, - HTTPStatus.OK, +def test_get_validity_by_kind_two_valid_kinds() -> None: + kind_a = "test_kind_a" + kind_b = "test_kind_b" + dataset = "test_dataset" + upsert_response(kind=kind_a, dataset=dataset, content={}, 
http_status=HTTPStatus.OK) + upsert_response(kind=kind_b, dataset=dataset, content={}, http_status=HTTPStatus.OK) + assert get_validity_by_kind(dataset=dataset) == {kind_a: True, kind_b: True} + + +def test_get_validity_by_kind_at_least_one_valid_response() -> None: + kind = "test_kind" + dataset = "test_dataset" + config_a = "test_config_a" + config_b = "test_config_b" + upsert_response(kind=kind, dataset=dataset, config=config_a, content={}, http_status=HTTPStatus.OK) + upsert_response( + kind=kind, dataset=dataset, config=config_b, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR @@ -262,0 +324 @@ def test_count_by_status_and_error_code() -> None: + assert get_validity_by_kind(dataset=dataset) == {kind: True} @@ -264 +325,0 @@ def test_count_by_status_and_error_code() -> None: - assert get_first_rows_responses_count_by_status_and_error_code() == {"200": {None: 1}} @@ -266,9 +327,10 @@ def test_count_by_status_and_error_code() -> None: - upsert_first_rows_response( - "test_dataset", - "test_config", - "test_split2", - { - "key": "value", - }, - HTTPStatus.INTERNAL_SERVER_ERROR, - error_code="error_code", +def test_get_validity_by_kind_only_invalid_responses() -> None: + kind = "test_kind" + dataset = "test_dataset" + config_a = "test_config_a" + config_b = "test_config_b" + upsert_response( + kind=kind, dataset=dataset, config=config_a, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR + ) + upsert_response( + kind=kind, dataset=dataset, config=config_b, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR @@ -275,0 +338 @@ def test_count_by_status_and_error_code() -> None: + assert get_validity_by_kind(dataset=dataset) == {kind: False} @@ -277,4 +339,0 @@ def test_count_by_status_and_error_code() -> None: - assert get_first_rows_responses_count_by_status_and_error_code() == { - "200": {None: 1}, - "500": {"error_code": 1}, - } @@ -281,0 +341,2 @@ def test_count_by_status_and_error_code() -> None: +def test_count_by_status_and_error_code() -> None: + assert "OK" not in get_responses_count_by_kind_status_and_error_code() @@ -283,6 +344,5 @@ def test_count_by_status_and_error_code() -> None: -def test_get_cache_reports_splits() -> None: - assert get_cache_reports_splits("", 2) == {"cache_reports": [], "next_cursor": ""} - upsert_splits_response( - "a", - {"key": "value"}, - HTTPStatus.OK, + upsert_response( + kind="test_kind", + dataset="test_dataset", + content={"key": "value"}, + http_status=HTTPStatus.OK, @@ -290,27 +350,12 @@ def test_get_cache_reports_splits() -> None: - b_details = { - "error": "error B", - "cause_exception": "ExceptionB", - "cause_message": "Cause message B", - "cause_traceback": ["B"], - } - worker_version = "0.1.2" - dataset_git_revision = "123456" - upsert_splits_response( - "b", - b_details, - HTTPStatus.INTERNAL_SERVER_ERROR, - error_code="ErrorCodeB", - details=b_details, - worker_version=worker_version, - dataset_git_revision=dataset_git_revision, - ) - c_details = { - "error": "error C", - "cause_exception": "ExceptionC", - "cause_message": "Cause message C", - "cause_traceback": ["C"], - } - upsert_splits_response( - "c", - { - "error": c_details["error"], + + assert get_responses_count_by_kind_status_and_error_code() == [ + {"kind": "test_kind", "http_status": 200, "error_code": None, "count": 1} + ] + + upsert_response( + kind="test_kind2", + dataset="test_dataset", + config="test_config", + split="test_split", + content={ + "key": "value", @@ -318,3 +363,2 @@ def test_get_cache_reports_splits() -> None: - 
HTTPStatus.INTERNAL_SERVER_ERROR, - "ErrorCodeC", - c_details, + http_status=HTTPStatus.INTERNAL_SERVER_ERROR, + error_code="error_code", @@ -322,19 +365,0 @@ def test_get_cache_reports_splits() -> None: - response = get_cache_reports_splits("", 2) - assert response["cache_reports"] == [ - { - "dataset": "a", - "http_status": HTTPStatus.OK.value, - "error_code": None, - "worker_version": None, - "dataset_git_revision": None, - }, - { - "dataset": "b", - "http_status": HTTPStatus.INTERNAL_SERVER_ERROR.value, - "error_code": "ErrorCodeB", - "worker_version": "0.1.2", - "dataset_git_revision": "123456", - }, - ] - assert response["next_cursor"] != "" - next_cursor = response["next_cursor"] @@ -342,13 +367,4 @@ def test_get_cache_reports_splits() -> None: - response = get_cache_reports_splits(next_cursor, 2) - assert response == { - "cache_reports": [ - { - "dataset": "c", - "http_status": HTTPStatus.INTERNAL_SERVER_ERROR.value, - "error_code": "ErrorCodeC", - "worker_version": None, - "dataset_git_revision": None, - }, - ], - "next_cursor": "", - } + assert get_responses_count_by_kind_status_and_error_code() == [ + {"kind": "test_kind", "http_status": 200, "error_code": None, "count": 1}, + {"kind": "test_kind2", "http_status": 500, "error_code": "error_code", "count": 1}, + ] @@ -356,6 +371,0 @@ def test_get_cache_reports_splits() -> None: - with pytest.raises(InvalidCursor): - get_cache_reports_splits("not an objectid", 2) - with pytest.raises(InvalidLimit): - get_cache_reports_splits(next_cursor, -1) - with pytest.raises(InvalidLimit): - get_cache_reports_splits(next_cursor, 0) @@ -362,0 +373,4 @@ def test_get_cache_reports_splits() -> None: +def test_get_cache_reports() -> None: + kind = "test_kind" + kind_2 = "test_kind_2" + assert get_cache_reports(kind=kind, cursor="", limit=2) == {"cache_reports": [], "next_cursor": ""} @@ -364,8 +378,8 @@ def test_get_cache_reports_splits() -> None: -def test_get_cache_reports_first_rows() -> None: - assert get_cache_reports_first_rows("", 2) == {"cache_reports": [], "next_cursor": ""} - upsert_first_rows_response( - "a", - "config", - "split", - {"key": "value"}, - HTTPStatus.OK, + dataset_a = "test_dataset_a" + content_a = {"key": "a"} + http_status_a = HTTPStatus.OK + upsert_response( + kind=kind, + dataset=dataset_a, + content=content_a, + http_status=http_status_a, @@ -373,5 +387,8 @@ def test_get_cache_reports_first_rows() -> None: - b_details = { - "error": "error B", - "cause_exception": "ExceptionB", - "cause_message": "Cause message B", - "cause_traceback": ["B"], + + dataset_b = "test_dataset_b" + config_b = "test_config_b" + content_b = {"key": "b"} + http_status_b = HTTPStatus.INTERNAL_SERVER_ERROR + error_code_b = "error_code_b" + details_b = { + "error": "error b", @@ -379,12 +396,12 @@ def test_get_cache_reports_first_rows() -> None: - worker_version = "0.1.2" - dataset_git_revision = "123456" - upsert_first_rows_response( - "b", - "config", - "split", - b_details, - HTTPStatus.INTERNAL_SERVER_ERROR, - error_code="ErrorCodeB", - details=b_details, - worker_version=worker_version, - dataset_git_revision=dataset_git_revision, + worker_version_b = "0.1.2" + dataset_git_revision_b = "123456" + upsert_response( + kind=kind, + dataset=dataset_b, + config=config_b, + content=content_b, + details=details_b, + http_status=http_status_b, + error_code=error_code_b, + worker_version=worker_version_b, + dataset_git_revision=dataset_git_revision_b, @@ -392,5 +409,9 @@ def test_get_cache_reports_first_rows() -> None: - c_details = { - "error": 
"error C", - "cause_exception": "ExceptionC", - "cause_message": "Cause message C", - "cause_traceback": ["C"], + + dataset_c = "test_dataset_c" + config_c = "test_config_c" + split_c = "test_split_c" + content_c = {"key": "c"} + http_status_c = HTTPStatus.INTERNAL_SERVER_ERROR + error_code_c = "error_code_c" + details_c = { + "error": "error c", @@ -398,10 +419,9 @@ def test_get_cache_reports_first_rows() -> None: - upsert_first_rows_response( - "c", - "config", - "split", - { - "error": c_details["error"], - }, - HTTPStatus.INTERNAL_SERVER_ERROR, - "ErrorCodeC", - c_details, + upsert_response( + kind=kind, + dataset=dataset_c, + config=config_c, + split=split_c, + content=content_c, + details=details_c, + http_status=http_status_c, + error_code=error_code_c, @@ -409 +429,10 @@ def test_get_cache_reports_first_rows() -> None: - response = get_cache_reports_first_rows("", 2) + upsert_response( + kind=kind_2, + dataset=dataset_c, + content=content_c, + details=details_c, + http_status=http_status_c, + error_code=error_code_c, + ) + + response = get_cache_reports(kind=kind, cursor="", limit=2) @@ -412,4 +441,5 @@ def test_get_cache_reports_first_rows() -> None: - "dataset": "a", - "config": "config", - "split": "split", - "http_status": HTTPStatus.OK.value, + "kind": kind, + "dataset": dataset_a, + "config": None, + "split": None, + "http_status": http_status_a.value, @@ -421,7 +451,8 @@ def test_get_cache_reports_first_rows() -> None: - "dataset": "b", - "config": "config", - "split": "split", - "http_status": HTTPStatus.INTERNAL_SERVER_ERROR.value, - "error_code": "ErrorCodeB", - "worker_version": "0.1.2", - "dataset_git_revision": "123456", + "kind": kind, + "dataset": dataset_b, + "config": config_b, + "split": None, + "http_status": http_status_b.value, + "error_code": error_code_b, + "worker_version": worker_version_b, + "dataset_git_revision": dataset_git_revision_b, @@ -433 +464 @@ def test_get_cache_reports_first_rows() -> None: - response = get_cache_reports_first_rows(next_cursor, 2) + response = get_cache_reports(kind=kind, cursor=next_cursor, limit=2) @@ -437,5 +468,6 @@ def test_get_cache_reports_first_rows() -> None: - "dataset": "c", - "config": "config", - "split": "split", - "http_status": HTTPStatus.INTERNAL_SERVER_ERROR.value, - "error_code": "ErrorCodeC", + "kind": kind, + "dataset": dataset_c, + "config": config_c, + "split": split_c, + "http_status": http_status_c.value, + "error_code": error_code_c, @@ -450,94 +482 @@ def test_get_cache_reports_first_rows() -> None: - get_cache_reports_first_rows("not an objectid", 2) - with pytest.raises(InvalidLimit): - get_cache_reports_first_rows(next_cursor, -1) - with pytest.raises(InvalidLimit): - get_cache_reports_first_rows(next_cursor, 0) - - [email protected]("num_entries", [100, 1_000]) -def test_stress_get_cache_reports_first_rows(num_entries: int) -> None: - MAX_SECONDS = 0.1 - assert get_cache_reports_first_rows("", 2) == {"cache_reports": [], "next_cursor": ""} - split_names = [f"split{i}" for i in range(num_entries)] - for split_name in split_names: - upsert_first_rows_response( - "dataset", - "config", - split_name, - {"key": "value"}, - HTTPStatus.OK, - ) - - next_cursor = "" - is_first: bool = True - while next_cursor != "" or is_first: - start = process_time() - is_first = False - response = get_cache_reports_first_rows(next_cursor, 100) - next_cursor = response["next_cursor"] - assert process_time() - start < MAX_SECONDS - - -def test_get_cache_reports_features() -> None: - assert get_cache_reports_features("", 
2) == {"cache_reports": [], "next_cursor": ""} - upsert_first_rows_response( - "a", - "config", - "split", - {"key": "value"}, - HTTPStatus.OK, - ) - b_details = { - "error": "error B", - "cause_exception": "ExceptionB", - "cause_message": "Cause message B", - "cause_traceback": ["B"], - } - upsert_first_rows_response( - "b", - "config", - "split", - b_details, - HTTPStatus.INTERNAL_SERVER_ERROR, - "ErrorCodeB", - b_details, - ) - upsert_first_rows_response( - "c", - "config", - "split", - {"features": "value"}, - HTTPStatus.OK, - ) - upsert_first_rows_response( - "d", - "config", - "split", - {"features": "value2"}, - HTTPStatus.OK, - ) - upsert_first_rows_response( - "e", - "config", - "split", - {"features": "value3"}, - HTTPStatus.OK, - ) - response = get_cache_reports_features("", 2) - assert response["cache_reports"] == [ - {"dataset": "c", "config": "config", "split": "split", "features": "value"}, - {"dataset": "d", "config": "config", "split": "split", "features": "value2"}, - ] - assert response["next_cursor"] != "" - next_cursor = response["next_cursor"] - - response = get_cache_reports_features(next_cursor, 2) - assert response == { - "cache_reports": [ - {"dataset": "e", "config": "config", "split": "split", "features": "value3"}, - ], - "next_cursor": "", - } - - with pytest.raises(InvalidCursor): - get_cache_reports_features("not an objectid", 2) + get_cache_reports(kind=kind, cursor="not an objectid", limit=2) @@ -545 +484 @@ def test_get_cache_reports_features() -> None: - get_cache_reports_features(next_cursor, -1) + get_cache_reports(kind=kind, cursor=next_cursor, limit=-1) @@ -547 +486 @@ def test_get_cache_reports_features() -> None: - get_cache_reports_features(next_cursor, 0) + get_cache_reports(kind=kind, cursor=next_cursor, limit=0) @@ -550,2 +489,2 @@ def test_get_cache_reports_features() -> None: [email protected]("num_entries", [100, 1_000]) -def test_stress_get_cache_reports_features(num_entries: int) -> None: [email protected]("num_entries", [1, 10, 100, 1_000]) +def test_stress_get_cache_reports(num_entries: int) -> None: @@ -553,9 +492,12 @@ def test_stress_get_cache_reports_features(num_entries: int) -> None: - assert get_cache_reports_features("", 2) == {"cache_reports": [], "next_cursor": ""} - split_names = [f"split{i}" for i in range(num_entries)] - for split_name in split_names: - upsert_first_rows_response( - "dataset", - "config", - split_name, - {"features": "value"}, - HTTPStatus.OK, + kind = "test_kind" + content = {"key": "value"} + http_status = HTTPStatus.OK + splits = [f"split{i}" for i in range(num_entries)] + for split in splits: + upsert_response( + kind=kind, + dataset="dataset", + config="config", + split=split, + content=content, + http_status=http_status, @@ -569 +511 @@ def test_stress_get_cache_reports_features(num_entries: int) -> None: - response = get_cache_reports_features(next_cursor, 100) + response = get_cache_reports(kind=kind, cursor=next_cursor, limit=100) diff --git a/libs/libqueue/src/libqueue/queue.py b/libs/libqueue/src/libqueue/queue.py index 401e200e..c056d341 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libqueue/src/libqueue/queue.py @@ -107 +107 @@ class Job(Document): - split (`str`, optional): The config on which to apply the job. + split (`str`, optional): The split on which to apply the job. 
diff --git a/services/admin/README.md b/services/admin/README.md index 44310e50..4482dc9c 100644 --- a/services/admin/README.md +++ b/services/admin/README.md @@ -63,3 +62,0 @@ The scripts: -- `refresh-cache`: add a /splits job for every HF dataset -- `refresh-cache-canonical`: add a /splits job for every HF canonical dataset -- `refresh-cache-errors`: add a /splits job for every erroneous HF dataset diff --git a/services/admin/Scripts.mk b/services/admin/Scripts.mk index 6d120522..3608dd4f 100644 --- a/services/admin/Scripts.mk +++ b/services/admin/Scripts.mk @@ -8,12 +7,0 @@ cancel-jobs-first-rows: - -.PHONY: refresh-cache -refresh-cache: - poetry run python src/admin/scripts/refresh_cache.py - -.PHONY: refresh-cache-canonical -refresh-cache-canonical: - poetry run python src/admin/scripts/refresh_cache_canonical.py - -.PHONY: refresh-cache-errors -refresh-cache-errors: - poetry run python src/admin/scripts/refresh_cache_errors.py diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 797799c6..1b717a74 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -3 +3 @@ name = "anyio" -version = "3.6.1" +version = "3.6.2" @@ -16 +16 @@ test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytes -trio = ["trio (>=0.16)"] +trio = ["trio (>=0.16,<0.22)"] @@ -124 +124 @@ name = "colorama" -version = "0.4.5" +version = "0.4.6" @@ -128 +128 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" @@ -312 +312 @@ name = "libcache" -version = "0.3.4" +version = "0.4.0" @@ -327 +327 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl" @@ -367 +367 @@ name = "marshmallow" -version = "3.18.0" +version = "3.19.0" @@ -377,3 +377,3 @@ packaging = ">=17.0" -dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] -docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] -lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)"] @@ -435 +435 @@ name = "orjson" -version = "3.8.0" +version = "3.8.1" @@ -454 +454 @@ name = "pathspec" -version = "0.10.1" +version = "0.10.2" @@ -462 +462 @@ name = "pbr" -version = "5.10.0" +version = "5.11.0" @@ -470,2 +470,2 @@ name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.5.4" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
@@ -477,2 +477,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] @@ -543 +543 @@ name = "pymongo" -version = "3.12.3" +version = "3.13.0" @@ -678 +678 @@ name = "ruamel.yaml.clib" -version = "0.2.6" +version = "0.2.7" @@ -747 +747 @@ name = "stevedore" -version = "4.0.1" +version = "4.1.1" @@ -859 +859 @@ python-versions = "3.9.6" -content-hash = "10fcf72c819681e16af3961e2ace8a2631b251459b4662dc52dfe9b7ee394bc1" +content-hash = "2f15816860d00b548fa2fefdd71bf0ac6fb0269655c68d9521f084ab5ddd0e3a" @@ -862,4 +862 @@ content-hash = "10fcf72c819681e16af3961e2ace8a2631b251459b4662dc52dfe9b7ee394bc1 -anyio = [ - {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, - {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, -] +anyio = [] @@ -886,4 +883 @@ click = [ -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] +colorama = [] @@ -920 +914 @@ libcache = [ - {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, + {file = "libcache-0.4.0-py3-none-any.whl", hash = "sha256:fd89c5935b219a67783283f35611f61d983b6df8dc79d687c470b3fb9754741d"}, @@ -973,4 +967 @@ pbr = [] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] +platformdirs = [] @@ -998,109 +989 @@ pyflakes = [ -pymongo = [ - {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8c7ad5cab282f53b9d78d51504330d1c88c83fbe187e472c07e6908a0293142e"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a766157b195a897c64945d4ff87b050bb0e763bb78f3964e996378621c703b00"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8d6bf6fcd42cde2f02efb8126812a010c297eacefcd090a609639d2aeda6185"}, - {file = "pymongo-3.12.3-cp27-cp27m-win32.whl", hash = "sha256:5fdffb0cfeb4dc8646a5381d32ec981ae8472f29c695bf09e8f7a8edb2db12ca"}, - {file = "pymongo-3.12.3-cp27-cp27m-win_amd64.whl", hash = "sha256:648fcfd8e019b122b7be0e26830a3a2224d57c3e934f19c1e53a77b8380e6675"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3f0ac6e0203bd88863649e6ed9c7cfe53afab304bc8225f2597c4c0a74e4d1f0"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:71c0db2c313ea8a80825fb61b7826b8015874aec29ee6364ade5cb774fe4511b"}, - {file = 
"pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b779e87300635b8075e8d5cfd4fdf7f46078cd7610c381d956bca5556bb8f97"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:351a2efe1c9566c348ad0076f4bf541f4905a0ebe2d271f112f60852575f3c16"}, - {file = "pymongo-3.12.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a02313e71b7c370c43056f6b16c45effbb2d29a44d24403a3d5ba6ed322fa3f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:d3082e5c4d7b388792124f5e805b469109e58f1ab1eb1fbd8b998e8ab766ffb7"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:514e78d20d8382d5b97f32b20c83d1d0452c302c9a135f0a9022236eb9940fda"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:b1b5be40ebf52c3c67ee547e2c4435ed5bc6352f38d23e394520b686641a6be4"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:58db209da08a502ce6948841d522dcec80921d714024354153d00b054571993c"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:5296e5e69243ffd76bd919854c4da6630ae52e46175c804bc4c0e050d937b705"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:51d1d061df3995c2332ae78f036492cc188cb3da8ef122caeab3631a67bb477e"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b974b7f49d65a16ca1435bc1c25a681bb7d630509dd23b2e819ed36da0b7f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e099b79ccf7c40f18b149a64d3d10639980035f9ceb223169dd806ff1bb0d9cc"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27e5ea64332385385b75414888ce9d1a9806be8616d7cef4ef409f4f256c6d06"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed7d11330e443aeecab23866055e08a5a536c95d2c25333aeb441af2dbac38d2"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93111fd4e08fa889c126aa8baf5c009a941880a539c87672e04583286517450a"}, - {file = "pymongo-3.12.3-cp310-cp310-win32.whl", hash = "sha256:2301051701b27aff2cbdf83fae22b7ca883c9563dfd088033267291b46196643"}, - {file = "pymongo-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:c7e8221278e5f9e2b6d3893cfc3a3e46c017161a57bb0e6f244826e4cee97916"}, - {file = "pymongo-3.12.3-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:7b4a9fcd95e978cd3c96cdc2096aa54705266551422cf0883c12a4044def31c6"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:06b64cdf5121f86b78a84e61b8f899b6988732a8d304b503ea1f94a676221c06"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:c8f7dd025cb0bf19e2f60a64dfc24b513c8330e0cfe4a34ccf941eafd6194d9e"}, - {file = "pymongo-3.12.3-cp34-cp34m-win32.whl", hash = "sha256:ab23b0545ec71ea346bf50a5d376d674f56205b729980eaa62cdb7871805014b"}, - {file = "pymongo-3.12.3-cp34-cp34m-win_amd64.whl", hash = "sha256:1b5cb75d2642ff7db823f509641f143f752c0d1ab03166cafea1e42e50469834"}, - {file = "pymongo-3.12.3-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:fc2048d13ff427605fea328cbe5369dce549b8c7657b0e22051a5b8831170af6"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c5f83bb59d0ff60c6fdb1f8a7b0288fbc4640b1f0fd56f5ae2387749c35d34e3"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_x86_64.whl", hash = 
"sha256:6632b1c63d58cddc72f43ab9f17267354ddce563dd5e11eadabd222dcc808808"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fedad05147b40ff8a93fcd016c421e6c159f149a2a481cfa0b94bfa3e473bab"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:208a61db8b8b647fb5b1ff3b52b4ed6dbced01eac3b61009958adb203596ee99"}, - {file = "pymongo-3.12.3-cp35-cp35m-win32.whl", hash = "sha256:3100a2352bdded6232b385ceda0c0a4624598c517d52c2d8cf014b7abbebd84d"}, - {file = "pymongo-3.12.3-cp35-cp35m-win_amd64.whl", hash = "sha256:3492ae1f97209c66af70e863e6420e6301cecb0a51a5efa701058aa73a8ca29e"}, - {file = "pymongo-3.12.3-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:87e18f29bac4a6be76a30e74de9c9005475e27100acf0830679420ce1fd9a6fd"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b3e08aef4ea05afbc0a70cd23c13684e7f5e074f02450964ec5cfa1c759d33d2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e66b3c9f8b89d4fd58a59c04fdbf10602a17c914fbaaa5e6ea593f1d54b06362"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5d67dbc8da2dac1644d71c1839d12d12aa333e266a9964d5b1a49feed036bc94"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:a351986d6c9006308f163c359ced40f80b6cffb42069f3e569b979829951038d"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5296669bff390135528001b4e48d33a7acaffcd361d98659628ece7f282f11aa"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:9d5b66d457d2c5739c184a777455c8fde7ab3600a56d8bbebecf64f7c55169e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:1c771f1a8b3cd2d697baaf57e9cfa4ae42371cacfbea42ea01d9577c06d92f96"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81a3ebc33b1367f301d1c8eda57eec4868e951504986d5d3fe437479dcdac5b2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cf113a46d81cff0559d57aa66ffa473d57d1a9496f97426318b6b5b14fdec1c"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64b9122be1c404ce4eb367ad609b590394587a676d84bfed8e03c3ce76d70560"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c6c71e198b36f0f0dfe354f06d3655ecfa30d69493a1da125a9a54668aad652"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33ab8c031f788609924e329003088831045f683931932a52a361d4a955b7dce2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2b4c95c47fb81b19ea77dc1c50d23af3eba87c9628fcc2e03d44124a3d336ea"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4e0a3ea7fd01cf0a36509f320226bd8491e0f448f00b8cb89f601c109f6874e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-win32.whl", hash = "sha256:dfec57f15f53d677b8e4535695ff3f37df7f8fe431f2efa8c3c8c4025b53d1eb"}, - {file = "pymongo-3.12.3-cp36-cp36m-win_amd64.whl", hash = "sha256:c22591cff80188dd8543be0b559d0c807f7288bd353dc0bcfe539b4588b3a5cd"}, - {file = "pymongo-3.12.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:7738147cd9dbd6d18d5593b3491b4620e13b61de975fd737283e4ad6c255c273"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:be1f10145f7ea76e3e836fdc5c8429c605675bdcddb0bca9725ee6e26874c00c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:295a5beaecb7bf054c1c6a28749ed72b19f4d4b61edcd8a0815d892424baf780"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:320f8734553c50cffe8a8e1ae36dfc7d7be1941c047489db20a814d2a170d7b5"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:5d20072d81cbfdd8e15e6a0c91fc7e3a4948c71e0adebfc67d3b4bcbe8602711"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:2c46a0afef69d61938a6fe32c3afd75b91dec3ab3056085dc72abbeedcc94166"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:5f530f35e1a57d4360eddcbed6945aecdaee2a491cd3f17025e7b5f2eea88ee7"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:6526933760ee1e6090db808f1690a111ec409699c1990efc96f134d26925c37f"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95d15cf81cd2fb926f2a6151a9f94c7aacc102b415e72bc0e040e29332b6731c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d52a70350ec3dfc39b513df12b03b7f4c8f8ec6873bbf958299999db7b05eb1"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9252c991e8176b5a2fa574c5ab9a841679e315f6e576eb7cf0bd958f3e39b0ad"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:145d78c345a38011497e55aff22c0f8edd40ee676a6810f7e69563d68a125e83"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8e0a086dbbee406cc6f603931dfe54d1cb2fba585758e06a2de01037784b737"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6d5443104f89a840250087863c91484a72f254574848e951d1bdd7d8b2ce7c9"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6f93dbfa5a461107bc3f5026e0d5180499e13379e9404f07a9f79eb5e9e1303d"}, - {file = "pymongo-3.12.3-cp37-cp37m-win32.whl", hash = "sha256:c9d212e2af72d5c8d082775a43eb726520e95bf1c84826440f74225843975136"}, - {file = "pymongo-3.12.3-cp37-cp37m-win_amd64.whl", hash = "sha256:320a1fe403dd83a35709fcf01083d14bc1462e9789b711201349a9158db3a87e"}, - {file = "pymongo-3.12.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a1ba93be779a9b8e5e44f5c133dc1db4313661cead8a2fd27661e6cb8d942ee9"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4294f2c1cd069b793e31c2e6d7ac44b121cf7cedccd03ebcc30f3fc3417b314a"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:845b178bd127bb074835d2eac635b980c58ec5e700ebadc8355062df708d5a71"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:176fdca18391e1206c32fb1d8265628a84d28333c20ad19468d91e3e98312cd1"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:28bfd5244d32faf3e49b5a8d1fab0631e922c26e8add089312e4be19fb05af50"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:f38b35ecd2628bf0267761ed659e48af7e620a7fcccfccf5774e7308fb18325c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:cebb3d8bcac4a6b48be65ebbc5c9881ed4a738e27bb96c86d9d7580a1fb09e05"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_x86_64.whl", hash = 
"sha256:80710d7591d579442c67a3bc7ae9dcba9ff95ea8414ac98001198d894fc4ff46"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89d7baa847383b9814de640c6f1a8553d125ec65e2761ad146ea2e75a7ad197c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:602284e652bb56ca8760f8e88a5280636c5b63d7946fca1c2fe0f83c37dffc64"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2d763d05ec7211313a06e8571236017d3e61d5fef97fcf34ec4b36c0b6556"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6e4dccae8ef5dd76052647d78f02d5d0ffaff1856277d951666c54aeba3ad2"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1fc4d3985868860b6585376e511bb32403c5ffb58b0ed913496c27fd791deea"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4e5d163e6644c2bc84dd9f67bfa89288c23af26983d08fefcc2cbc22f6e57e6"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8d92c6bb9174d47c2257528f64645a00bbc6324a9ff45a626192797aff01dc14"}, - {file = "pymongo-3.12.3-cp38-cp38-win32.whl", hash = "sha256:b0db9a4691074c347f5d7ee830ab3529bc5ad860939de21c1f9c403daf1eda9a"}, - {file = "pymongo-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:d81047341ab56061aa4b6823c54d4632579c3b16e675089e8f520e9b918a133b"}, - {file = "pymongo-3.12.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07398d8a03545b98282f459f2603a6bb271f4448d484ed7f411121a519a7ea48"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b7df0d99e189b7027d417d4bfd9b8c53c9c7ed5a0a1495d26a6f547d820eca88"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a283425e6a474facd73072d8968812d1d9058490a5781e022ccf8895500b83ce"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2577b8161eeae4dd376d13100b2137d883c10bb457dd08935f60c9f9d4b5c5f6"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:517b09b1dd842390a965a896d1327c55dfe78199c9f5840595d40facbcd81854"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2567885ff0c8c7c0887ba6cefe4ae4af96364a66a7069f924ce0cd12eb971d04"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:71c5c200fd37a5322706080b09c3ec8907cf01c377a7187f354fc9e9e13abc73"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:14dee106a10b77224bba5efeeb6aee025aabe88eb87a2b850c46d3ee55bdab4a"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f340a2a908644ea6cccd399be0fb308c66e05d2800107345f9f0f0d59e1731c4"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b4c535f524c9d8c86c3afd71d199025daa070859a2bdaf94a298120b0de16db"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8455176fd1b86de97d859fed4ae0ef867bf998581f584c7a1a591246dfec330f"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf254a1a95e95fdf4eaa25faa1ea450a6533ed7a997f9f8e49ab971b61ea514d"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a8a3540e21213cb8ce232e68a7d0ee49cdd35194856c50b8bd87eeb572fadd42"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0e7a5d0b9077e8c3e57727f797ee8adf12e1d5e7534642230d98980d160d1320"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0be605bfb8461384a4cb81e80f51eb5ca1b89851f2d0e69a75458c788a7263a4"}, - {file = "pymongo-3.12.3-cp39-cp39-win32.whl", hash = "sha256:2157d68f85c28688e8b723bbe70c8013e0aba5570e08c48b3562f74d33fc05c4"}, - {file = "pymongo-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfa217bf8cf3ff6b30c8e6a89014e0c0e7b50941af787b970060ae5ba04a4ce5"}, - {file = "pymongo-3.12.3-py2.7-macosx-10.14-intel.egg", hash = "sha256:d81299f63dc33cc172c26faf59cc54dd795fc6dd5821a7676cca112a5ee8bbd6"}, - {file = "pymongo-3.12.3.tar.gz", hash = "sha256:0a89cadc0062a5e53664dde043f6c097172b8c1c5f0094490095282ff9995a5f"}, -] +pymongo = [] diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index f2e466c8..e5e4afd3 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -11 +11 @@ huggingface-hub = "^0.11.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index da98937b..6e5bee6b 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -20,0 +21 @@ from admin.routes.pending_jobs import create_pending_jobs_endpoint +from admin.utils import CacheKind @@ -56,0 +58 @@ def create_app() -> Starlette: + # TODO: re-enable. Possibly using tags @@ -58,10 +60,10 @@ def create_app() -> Starlette: - Route( - "/cache-reports/features", - endpoint=create_cache_reports_endpoint( - endpoint="features", - cache_reports_num_results=app_config.admin.cache_reports_num_results, - max_age=app_config.admin.max_age, - external_auth_url=app_config.admin.external_auth_url, - organization=app_config.admin.hf_organization, - ), - ), + # Route( + # "/cache-reports/features", + # endpoint=create_cache_reports_endpoint( + # cache_kind="features", + # cache_reports_num_results=app_config.admin.cache_reports_num_results, + # max_age=app_config.admin.max_age, + # external_auth_url=app_config.admin.external_auth_url, + # organization=app_config.admin.hf_organization, + # ), + # ), @@ -71 +73 @@ def create_app() -> Starlette: - endpoint="first-rows", + kind=CacheKind.FIRST_ROWS, @@ -81 +83 @@ def create_app() -> Starlette: - endpoint="splits", + kind=CacheKind.SPLITS, diff --git a/services/admin/src/admin/dataset.py b/services/admin/src/admin/dataset.py index c34cc773..622c8c82 100644 --- a/services/admin/src/admin/dataset.py +++ b/services/admin/src/admin/dataset.py @@ -9,4 +8,0 @@ from huggingface_hub.utils import RepositoryNotFoundError -from libcache.simple_cache import ( - mark_first_rows_responses_as_stale, - mark_splits_responses_as_stale, -) @@ -49,2 +44,0 @@ def update_splits(dataset: str, force: bool = False) -> None: - mark_splits_responses_as_stale(dataset_name=dataset) - mark_first_rows_responses_as_stale(dataset_name=dataset) @@ -56 +49,0 @@ def update_first_rows(dataset: str, config: str, split: str, force: bool = False - mark_first_rows_responses_as_stale(dataset_name=dataset, config_name=config, split_name=split) diff --git a/services/admin/src/admin/prometheus.py b/services/admin/src/admin/prometheus.py index 
09855314..7c3657c2 100644 --- a/services/admin/src/admin/prometheus.py +++ b/services/admin/src/admin/prometheus.py @@ -6,4 +6 @@ import os -from libcache.simple_cache import ( - get_first_rows_responses_count_by_status_and_error_code, - get_splits_responses_count_by_status_and_error_code, -) +from libcache.simple_cache import get_responses_count_by_kind_status_and_error_code @@ -36 +33 @@ RESPONSES_IN_CACHE_TOTAL = Gauge( - labelnames=["path", "http_status", "error_code"], + labelnames=["kind", "http_status", "error_code"], @@ -66,10 +63,4 @@ class Prometheus: - for http_status, by_error_code in get_splits_responses_count_by_status_and_error_code().items(): - for error_code, total in by_error_code.items(): - RESPONSES_IN_CACHE_TOTAL.labels(path="/splits", http_status=http_status, error_code=error_code).set( - total - ) - for http_status, by_error_code in get_first_rows_responses_count_by_status_and_error_code().items(): - for error_code, total in by_error_code.items(): - RESPONSES_IN_CACHE_TOTAL.labels( - path="/first-rows", http_status=http_status, error_code=error_code - ).set(total) + for metric in get_responses_count_by_kind_status_and_error_code(): + RESPONSES_IN_CACHE_TOTAL.labels( + kind=metric["kind"], http_status=metric["http_status"], error_code=metric["error_code"] + ).set(metric["count"]) diff --git a/services/admin/src/admin/routes/cache_reports.py b/services/admin/src/admin/routes/cache_reports.py index 849b8038..c26a8e81 100644 --- a/services/admin/src/admin/routes/cache_reports.py +++ b/services/admin/src/admin/routes/cache_reports.py @@ -5 +5 @@ import logging -from typing import Callable, Dict, Literal, Optional +from typing import Optional @@ -7,7 +7 @@ from typing import Callable, Dict, Literal, Optional -from libcache.simple_cache import ( - InvalidCursor, - InvalidLimit, - get_cache_reports_features, - get_cache_reports_first_rows, - get_cache_reports_splits, -) +from libcache.simple_cache import InvalidCursor, InvalidLimit, get_cache_reports @@ -19,0 +14 @@ from admin.utils import ( + CacheKind, @@ -27,9 +21,0 @@ from admin.utils import ( -EndpointName = Literal["features", "first-rows", "splits"] - - -get_cache_reports: Dict[EndpointName, Callable] = { - "features": get_cache_reports_features, - "first-rows": get_cache_reports_first_rows, - "splits": get_cache_reports_splits, -} - @@ -38 +24 @@ def create_cache_reports_endpoint( - endpoint: EndpointName, + kind: CacheKind, @@ -44,2 +29,0 @@ def create_cache_reports_endpoint( - get_cache_reports = get_cache_reports_features if endpoint == "features" else get_cache_reports_first_rows - @@ -49 +33 @@ def create_cache_reports_endpoint( - logging.info(f"/cache-reports/{endpoint}, cursor={cursor}") + logging.info(f"Cache reports for {kind.value}, cursor={cursor}") @@ -54 +38 @@ def create_cache_reports_endpoint( - get_cache_reports(cursor=cursor, limit=cache_reports_num_results), + get_cache_reports(kind=kind.value, cursor=cursor, limit=cache_reports_num_results), diff --git a/services/admin/src/admin/scripts/refresh_cache.py b/services/admin/src/admin/scripts/refresh_cache.py deleted file mode 100644 index c1cc4aa3..00000000 --- a/services/admin/src/admin/scripts/refresh_cache.py +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -import logging -from typing import List - -from huggingface_hub.hf_api import HfApi -from libqueue.queue import Queue - -from admin.config import AppConfig -from admin.utils import JobType - - -def get_hf_dataset_names(hf_endpoint: str): - return [str(dataset.id) for dataset in HfApi(hf_endpoint).list_datasets(full=False)] - - -def refresh_datasets_cache(dataset_names: List[str]) -> None: - splits_queue = Queue(type=JobType.SPLITS.value) - for dataset_name in dataset_names: - # don't mark the cache entries as stale, because it's manually triggered - splits_queue.add_job(dataset=dataset_name) - logging.info(f"added a job to refresh '{dataset_name}'") - - -if __name__ == "__main__": - app_config = AppConfig() - refresh_datasets_cache(get_hf_dataset_names(hf_endpoint=app_config.common.hf_endpoint)) - logging.info("all the datasets of the Hub have been added to the queue to refresh the cache") diff --git a/services/admin/src/admin/scripts/refresh_cache_canonical.py b/services/admin/src/admin/scripts/refresh_cache_canonical.py deleted file mode 100644 index 70a58a12..00000000 --- a/services/admin/src/admin/scripts/refresh_cache_canonical.py +++ /dev/null @@ -1,19 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import logging - -from huggingface_hub.hf_api import HfApi - -from admin.config import AppConfig -from admin.scripts.refresh_cache import refresh_datasets_cache - - -def get_hf_canonical_dataset_names(hf_endpoint: str): - return [str(dataset.id) for dataset in HfApi(hf_endpoint).list_datasets(full=False) if dataset.id.find("/") == -1] - - -if __name__ == "__main__": - app_config = AppConfig() - refresh_datasets_cache(get_hf_canonical_dataset_names(hf_endpoint=app_config.common.hf_endpoint)) - logging.info("all the canonical datasets of the Hub have been added to the queue to refresh the cache") diff --git a/services/admin/src/admin/scripts/refresh_cache_errors.py b/services/admin/src/admin/scripts/refresh_cache_errors.py deleted file mode 100644 index 6e2c3556..00000000 --- a/services/admin/src/admin/scripts/refresh_cache_errors.py +++ /dev/null @@ -1,14 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import logging - -from libcache.simple_cache import get_datasets_with_some_error - -from admin.config import AppConfig -from admin.scripts.refresh_cache import refresh_datasets_cache - -if __name__ == "__main__": - app_config = AppConfig() - refresh_datasets_cache(get_datasets_with_some_error()) - logging.info("all the datasets with some error in the cache have been added to the queue to be refreshed") diff --git a/services/admin/src/admin/utils.py b/services/admin/src/admin/utils.py index 787e4065..62012b15 100644 --- a/services/admin/src/admin/utils.py +++ b/services/admin/src/admin/utils.py @@ -127,0 +128,5 @@ class JobType(Enum): + + +class CacheKind(Enum): + SPLITS = "/splits" + FIRST_ROWS = "/first-rows" diff --git a/services/admin/tests/scripts/__init__.py b/services/admin/tests/scripts/__init__.py deleted file mode 100644 index 1e9d0c5a..00000000 --- a/services/admin/tests/scripts/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
diff --git a/services/admin/tests/scripts/test_refresh_cache_canonical.py b/services/admin/tests/scripts/test_refresh_cache_canonical.py deleted file mode 100644 index adedd56b..00000000 --- a/services/admin/tests/scripts/test_refresh_cache_canonical.py +++ /dev/null @@ -1,15 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from admin.config import AppConfig -from admin.scripts.refresh_cache_canonical import get_hf_canonical_dataset_names - -from ..fixtures.hub import DatasetRepos - - -def test_get_hf_canonical_dataset_names(app_config: AppConfig, hf_dataset_repos_csv_data: DatasetRepos) -> None: - dataset_names = get_hf_canonical_dataset_names(hf_endpoint=app_config.common.hf_endpoint) - assert len(dataset_names) >= 0 - assert hf_dataset_repos_csv_data["public"] not in dataset_names - assert hf_dataset_repos_csv_data["gated"] not in dataset_names - assert hf_dataset_repos_csv_data["private"] not in dataset_names diff --git a/services/admin/tests/test_app.py b/services/admin/tests/test_app.py index 2322c31c..51730175 100644 --- a/services/admin/tests/test_app.py +++ b/services/admin/tests/test_app.py @@ -7 +7 @@ import pytest -from libcache.simple_cache import _clean_database as _clean_cache_database +from libcache.simple_cache import _clean_cache_database diff --git a/services/admin/tests/test_prometheus.py b/services/admin/tests/test_prometheus.py index 24f45481..5c45580a 100644 --- a/services/admin/tests/test_prometheus.py +++ b/services/admin/tests/test_prometheus.py @@ -31,9 +30,0 @@ def test_prometheus(app_config: AppConfig) -> None: - # still empty - assert ( - "responses_in_cache_total{" + additional_field + 'path="/splits",http_status="200",error_code=null}' - not in metrics - ) - assert ( - "responses_in_cache_total{" + additional_field + 'path="/first-rows",http_status="200",error_code=null}' - not in metrics - ) diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 4b6763f8..9acfba1f 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -124 +124 @@ name = "colorama" -version = "0.4.5" +version = "0.4.6" @@ -128 +128 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" @@ -312 +312 @@ name = "jsonschema" -version = "4.16.0" +version = "4.17.0" @@ -328 +328 @@ name = "libcache" -version = "0.3.4" +version = "0.4.0" @@ -343 +343 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl" @@ -391 +391 @@ name = "marshmallow" -version = "3.18.0" +version = "3.19.0" @@ -401,3 +401,3 @@ packaging = ">=17.0" -dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] -docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] -lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)"] @@ -459 +459 @@ name = "orjson" 
-version = "3.8.0" +version = "3.8.1" @@ -478 +478 @@ name = "pathspec" -version = "0.10.1" +version = "0.10.2" @@ -486 +486 @@ name = "pbr" -version = "5.10.0" +version = "5.11.0" @@ -494,2 +494,2 @@ name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.5.4" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." @@ -501,2 +501,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] @@ -540 +540 @@ name = "psutil" -version = "5.9.3" +version = "5.9.4" @@ -567 +567 @@ name = "pymongo" -version = "3.12.3" +version = "3.13.0" @@ -599 +599 @@ name = "pyrsistent" -version = "0.18.1" +version = "0.19.2" @@ -706 +706 @@ name = "ruamel.yaml.clib" -version = "0.2.6" +version = "0.2.7" @@ -775 +775 @@ name = "stevedore" -version = "4.0.1" +version = "4.1.1" @@ -901 +901 @@ python-versions = "3.9.6" -content-hash = "0388f148c99c5bb8ee39a81073ecf4b153321250eb1fb684aba1add0c854ac8b" +content-hash = "f1a5907c7a281a5ca05beb4980cc2abaf02d9fdf5a9e0244f572b348f08ff6a6" @@ -925,4 +925 @@ click = [ -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] +colorama = [] @@ -960 +957 @@ libcache = [ - {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, + {file = "libcache-0.4.0-py3-none-any.whl", hash = "sha256:fd89c5935b219a67783283f35611f61d983b6df8dc79d687c470b3fb9754741d"}, @@ -1014,4 +1011 @@ pbr = [] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] +platformdirs = [] @@ -1039,109 +1033 @@ pyflakes = [ -pymongo = [ - {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8c7ad5cab282f53b9d78d51504330d1c88c83fbe187e472c07e6908a0293142e"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a766157b195a897c64945d4ff87b050bb0e763bb78f3964e996378621c703b00"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8d6bf6fcd42cde2f02efb8126812a010c297eacefcd090a609639d2aeda6185"}, - {file = "pymongo-3.12.3-cp27-cp27m-win32.whl", hash = "sha256:5fdffb0cfeb4dc8646a5381d32ec981ae8472f29c695bf09e8f7a8edb2db12ca"}, - {file = "pymongo-3.12.3-cp27-cp27m-win_amd64.whl", hash = "sha256:648fcfd8e019b122b7be0e26830a3a2224d57c3e934f19c1e53a77b8380e6675"}, - {file = 
"pymongo-3.12.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3f0ac6e0203bd88863649e6ed9c7cfe53afab304bc8225f2597c4c0a74e4d1f0"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:71c0db2c313ea8a80825fb61b7826b8015874aec29ee6364ade5cb774fe4511b"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b779e87300635b8075e8d5cfd4fdf7f46078cd7610c381d956bca5556bb8f97"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:351a2efe1c9566c348ad0076f4bf541f4905a0ebe2d271f112f60852575f3c16"}, - {file = "pymongo-3.12.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a02313e71b7c370c43056f6b16c45effbb2d29a44d24403a3d5ba6ed322fa3f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:d3082e5c4d7b388792124f5e805b469109e58f1ab1eb1fbd8b998e8ab766ffb7"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:514e78d20d8382d5b97f32b20c83d1d0452c302c9a135f0a9022236eb9940fda"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:b1b5be40ebf52c3c67ee547e2c4435ed5bc6352f38d23e394520b686641a6be4"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:58db209da08a502ce6948841d522dcec80921d714024354153d00b054571993c"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:5296e5e69243ffd76bd919854c4da6630ae52e46175c804bc4c0e050d937b705"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:51d1d061df3995c2332ae78f036492cc188cb3da8ef122caeab3631a67bb477e"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b974b7f49d65a16ca1435bc1c25a681bb7d630509dd23b2e819ed36da0b7f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e099b79ccf7c40f18b149a64d3d10639980035f9ceb223169dd806ff1bb0d9cc"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27e5ea64332385385b75414888ce9d1a9806be8616d7cef4ef409f4f256c6d06"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed7d11330e443aeecab23866055e08a5a536c95d2c25333aeb441af2dbac38d2"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93111fd4e08fa889c126aa8baf5c009a941880a539c87672e04583286517450a"}, - {file = "pymongo-3.12.3-cp310-cp310-win32.whl", hash = "sha256:2301051701b27aff2cbdf83fae22b7ca883c9563dfd088033267291b46196643"}, - {file = "pymongo-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:c7e8221278e5f9e2b6d3893cfc3a3e46c017161a57bb0e6f244826e4cee97916"}, - {file = "pymongo-3.12.3-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:7b4a9fcd95e978cd3c96cdc2096aa54705266551422cf0883c12a4044def31c6"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:06b64cdf5121f86b78a84e61b8f899b6988732a8d304b503ea1f94a676221c06"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:c8f7dd025cb0bf19e2f60a64dfc24b513c8330e0cfe4a34ccf941eafd6194d9e"}, - {file = "pymongo-3.12.3-cp34-cp34m-win32.whl", hash = "sha256:ab23b0545ec71ea346bf50a5d376d674f56205b729980eaa62cdb7871805014b"}, - {file = "pymongo-3.12.3-cp34-cp34m-win_amd64.whl", hash = "sha256:1b5cb75d2642ff7db823f509641f143f752c0d1ab03166cafea1e42e50469834"}, - {file = "pymongo-3.12.3-cp35-cp35m-macosx_10_6_intel.whl", hash 
= "sha256:fc2048d13ff427605fea328cbe5369dce549b8c7657b0e22051a5b8831170af6"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c5f83bb59d0ff60c6fdb1f8a7b0288fbc4640b1f0fd56f5ae2387749c35d34e3"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6632b1c63d58cddc72f43ab9f17267354ddce563dd5e11eadabd222dcc808808"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fedad05147b40ff8a93fcd016c421e6c159f149a2a481cfa0b94bfa3e473bab"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:208a61db8b8b647fb5b1ff3b52b4ed6dbced01eac3b61009958adb203596ee99"}, - {file = "pymongo-3.12.3-cp35-cp35m-win32.whl", hash = "sha256:3100a2352bdded6232b385ceda0c0a4624598c517d52c2d8cf014b7abbebd84d"}, - {file = "pymongo-3.12.3-cp35-cp35m-win_amd64.whl", hash = "sha256:3492ae1f97209c66af70e863e6420e6301cecb0a51a5efa701058aa73a8ca29e"}, - {file = "pymongo-3.12.3-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:87e18f29bac4a6be76a30e74de9c9005475e27100acf0830679420ce1fd9a6fd"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b3e08aef4ea05afbc0a70cd23c13684e7f5e074f02450964ec5cfa1c759d33d2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e66b3c9f8b89d4fd58a59c04fdbf10602a17c914fbaaa5e6ea593f1d54b06362"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5d67dbc8da2dac1644d71c1839d12d12aa333e266a9964d5b1a49feed036bc94"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:a351986d6c9006308f163c359ced40f80b6cffb42069f3e569b979829951038d"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5296669bff390135528001b4e48d33a7acaffcd361d98659628ece7f282f11aa"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:9d5b66d457d2c5739c184a777455c8fde7ab3600a56d8bbebecf64f7c55169e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:1c771f1a8b3cd2d697baaf57e9cfa4ae42371cacfbea42ea01d9577c06d92f96"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81a3ebc33b1367f301d1c8eda57eec4868e951504986d5d3fe437479dcdac5b2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cf113a46d81cff0559d57aa66ffa473d57d1a9496f97426318b6b5b14fdec1c"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64b9122be1c404ce4eb367ad609b590394587a676d84bfed8e03c3ce76d70560"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c6c71e198b36f0f0dfe354f06d3655ecfa30d69493a1da125a9a54668aad652"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33ab8c031f788609924e329003088831045f683931932a52a361d4a955b7dce2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2b4c95c47fb81b19ea77dc1c50d23af3eba87c9628fcc2e03d44124a3d336ea"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4e0a3ea7fd01cf0a36509f320226bd8491e0f448f00b8cb89f601c109f6874e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-win32.whl", hash = "sha256:dfec57f15f53d677b8e4535695ff3f37df7f8fe431f2efa8c3c8c4025b53d1eb"}, - {file = "pymongo-3.12.3-cp36-cp36m-win_amd64.whl", hash = 
"sha256:c22591cff80188dd8543be0b559d0c807f7288bd353dc0bcfe539b4588b3a5cd"}, - {file = "pymongo-3.12.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:7738147cd9dbd6d18d5593b3491b4620e13b61de975fd737283e4ad6c255c273"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:be1f10145f7ea76e3e836fdc5c8429c605675bdcddb0bca9725ee6e26874c00c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:295a5beaecb7bf054c1c6a28749ed72b19f4d4b61edcd8a0815d892424baf780"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:320f8734553c50cffe8a8e1ae36dfc7d7be1941c047489db20a814d2a170d7b5"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:5d20072d81cbfdd8e15e6a0c91fc7e3a4948c71e0adebfc67d3b4bcbe8602711"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:2c46a0afef69d61938a6fe32c3afd75b91dec3ab3056085dc72abbeedcc94166"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:5f530f35e1a57d4360eddcbed6945aecdaee2a491cd3f17025e7b5f2eea88ee7"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:6526933760ee1e6090db808f1690a111ec409699c1990efc96f134d26925c37f"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95d15cf81cd2fb926f2a6151a9f94c7aacc102b415e72bc0e040e29332b6731c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d52a70350ec3dfc39b513df12b03b7f4c8f8ec6873bbf958299999db7b05eb1"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9252c991e8176b5a2fa574c5ab9a841679e315f6e576eb7cf0bd958f3e39b0ad"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:145d78c345a38011497e55aff22c0f8edd40ee676a6810f7e69563d68a125e83"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8e0a086dbbee406cc6f603931dfe54d1cb2fba585758e06a2de01037784b737"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6d5443104f89a840250087863c91484a72f254574848e951d1bdd7d8b2ce7c9"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6f93dbfa5a461107bc3f5026e0d5180499e13379e9404f07a9f79eb5e9e1303d"}, - {file = "pymongo-3.12.3-cp37-cp37m-win32.whl", hash = "sha256:c9d212e2af72d5c8d082775a43eb726520e95bf1c84826440f74225843975136"}, - {file = "pymongo-3.12.3-cp37-cp37m-win_amd64.whl", hash = "sha256:320a1fe403dd83a35709fcf01083d14bc1462e9789b711201349a9158db3a87e"}, - {file = "pymongo-3.12.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a1ba93be779a9b8e5e44f5c133dc1db4313661cead8a2fd27661e6cb8d942ee9"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4294f2c1cd069b793e31c2e6d7ac44b121cf7cedccd03ebcc30f3fc3417b314a"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:845b178bd127bb074835d2eac635b980c58ec5e700ebadc8355062df708d5a71"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:176fdca18391e1206c32fb1d8265628a84d28333c20ad19468d91e3e98312cd1"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:28bfd5244d32faf3e49b5a8d1fab0631e922c26e8add089312e4be19fb05af50"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = 
"sha256:f38b35ecd2628bf0267761ed659e48af7e620a7fcccfccf5774e7308fb18325c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:cebb3d8bcac4a6b48be65ebbc5c9881ed4a738e27bb96c86d9d7580a1fb09e05"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:80710d7591d579442c67a3bc7ae9dcba9ff95ea8414ac98001198d894fc4ff46"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89d7baa847383b9814de640c6f1a8553d125ec65e2761ad146ea2e75a7ad197c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:602284e652bb56ca8760f8e88a5280636c5b63d7946fca1c2fe0f83c37dffc64"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2d763d05ec7211313a06e8571236017d3e61d5fef97fcf34ec4b36c0b6556"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6e4dccae8ef5dd76052647d78f02d5d0ffaff1856277d951666c54aeba3ad2"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1fc4d3985868860b6585376e511bb32403c5ffb58b0ed913496c27fd791deea"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4e5d163e6644c2bc84dd9f67bfa89288c23af26983d08fefcc2cbc22f6e57e6"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8d92c6bb9174d47c2257528f64645a00bbc6324a9ff45a626192797aff01dc14"}, - {file = "pymongo-3.12.3-cp38-cp38-win32.whl", hash = "sha256:b0db9a4691074c347f5d7ee830ab3529bc5ad860939de21c1f9c403daf1eda9a"}, - {file = "pymongo-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:d81047341ab56061aa4b6823c54d4632579c3b16e675089e8f520e9b918a133b"}, - {file = "pymongo-3.12.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07398d8a03545b98282f459f2603a6bb271f4448d484ed7f411121a519a7ea48"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b7df0d99e189b7027d417d4bfd9b8c53c9c7ed5a0a1495d26a6f547d820eca88"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a283425e6a474facd73072d8968812d1d9058490a5781e022ccf8895500b83ce"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2577b8161eeae4dd376d13100b2137d883c10bb457dd08935f60c9f9d4b5c5f6"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:517b09b1dd842390a965a896d1327c55dfe78199c9f5840595d40facbcd81854"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2567885ff0c8c7c0887ba6cefe4ae4af96364a66a7069f924ce0cd12eb971d04"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:71c5c200fd37a5322706080b09c3ec8907cf01c377a7187f354fc9e9e13abc73"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:14dee106a10b77224bba5efeeb6aee025aabe88eb87a2b850c46d3ee55bdab4a"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f340a2a908644ea6cccd399be0fb308c66e05d2800107345f9f0f0d59e1731c4"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b4c535f524c9d8c86c3afd71d199025daa070859a2bdaf94a298120b0de16db"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8455176fd1b86de97d859fed4ae0ef867bf998581f584c7a1a591246dfec330f"}, - {file = 
"pymongo-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf254a1a95e95fdf4eaa25faa1ea450a6533ed7a997f9f8e49ab971b61ea514d"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a3540e21213cb8ce232e68a7d0ee49cdd35194856c50b8bd87eeb572fadd42"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0e7a5d0b9077e8c3e57727f797ee8adf12e1d5e7534642230d98980d160d1320"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0be605bfb8461384a4cb81e80f51eb5ca1b89851f2d0e69a75458c788a7263a4"}, - {file = "pymongo-3.12.3-cp39-cp39-win32.whl", hash = "sha256:2157d68f85c28688e8b723bbe70c8013e0aba5570e08c48b3562f74d33fc05c4"}, - {file = "pymongo-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfa217bf8cf3ff6b30c8e6a89014e0c0e7b50941af787b970060ae5ba04a4ce5"}, - {file = "pymongo-3.12.3-py2.7-macosx-10.14-intel.egg", hash = "sha256:d81299f63dc33cc172c26faf59cc54dd795fc6dd5821a7676cca112a5ee8bbd6"}, - {file = "pymongo-3.12.3.tar.gz", hash = "sha256:0a89cadc0062a5e53664dde043f6c097172b8c1c5f0094490095282ff9995a5f"}, -] +pymongo = [] @@ -1152,23 +1038 @@ pyparsing = [ -pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = 
"pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, -] +pyrsistent = [] diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 001949b6..f3de8538 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -12 +12 @@ jsonschema = "^4.16.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl", develop = false } diff --git a/services/api/src/api/dataset.py b/services/api/src/api/dataset.py index ea08aac3..04f8d500 100644 --- a/services/api/src/api/dataset.py +++ b/services/api/src/api/dataset.py @@ -10,8 +10 @@ from huggingface_hub.utils import RepositoryNotFoundError -from libcache.simple_cache import ( - DoesNotExist, - delete_first_rows_responses, - delete_splits_responses, - get_splits_response, - mark_first_rows_responses_as_stale, - mark_splits_responses_as_stale, -) +from libcache.simple_cache import DoesNotExist, delete_dataset_responses, get_response @@ -20 +13 @@ from libqueue.queue import Queue -from api.utils import JobType +from api.utils import CacheKind, JobType @@ -52,5 +45,9 @@ def is_supported( -def update(dataset: str, force: bool = False) -> None: - logging.debug(f"webhook: refresh {dataset}") - mark_splits_responses_as_stale(dataset) - mark_first_rows_responses_as_stale(dataset) - splits_queue.add_job(dataset=dataset, force=force) +def update(dataset: str, hf_endpoint: str, hf_token: Optional[str] = None, force: bool = False) -> bool: + if is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): + logging.debug(f"refresh dataset='{dataset}'") + splits_queue.add_job(dataset=dataset, force=force) + return True + else: + logging.debug(f"can't refresh dataset='{dataset}', it's not supported (does not exist, private, etc.)") + return False + @@ -57,0 +55,4 @@ def update(dataset: str, force: bool = False) -> None: +def delete(dataset: str) -> bool: + logging.debug(f"delete cache for dataset='{dataset}'") + delete_dataset_responses(dataset=dataset) + return True @@ -59,4 +60,9 @@ def update(dataset: str, force: bool = False) -> None: -def delete(dataset: str) -> None: - logging.debug(f"webhook: delete {dataset}") - delete_splits_responses(dataset) - delete_first_rows_responses(dataset) + +def move( + from_dataset: str, to_dataset: str, hf_endpoint: str, hf_token: Optional[str] = None, force: bool = False +) -> bool: + # not optimal as we might try to rename instead + if update(dataset=to_dataset, hf_endpoint=hf_endpoint, 
hf_token=hf_token, force=force): + return delete(dataset=from_dataset) + else: + return False @@ -73,5 +79 @@ def is_splits_in_process( - if is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): - # the dataset is supported, let's refresh it - update(dataset=dataset, force=False) - return True - return False + return update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, force=False) @@ -83,2 +85,3 @@ def is_first_rows_in_process( - if first_rows_queue.is_job_in_process(dataset=dataset, config=config, split=split): - # the /first-rows response is not ready yet + if first_rows_queue.is_job_in_process( + dataset=dataset, config=config, split=split + ) or splits_queue.is_job_in_process(dataset=dataset): @@ -87,5 +90,2 @@ def is_first_rows_in_process( - # a bit convoluted, but checking if the first-rows response should exist - # requires to first parse the /splits response for the same dataset - if splits_queue.is_job_in_process(dataset=dataset): - # the /splits response is not ready yet - return True + # a bit convoluted, but to check if the first-rows response should exist, + # we have to check the content of the /splits response for the same dataset @@ -93,13 +93 @@ def is_first_rows_in_process( - result = get_splits_response(dataset) - if result["http_status"] == HTTPStatus.OK and any( - split_item["dataset"] == dataset or split_item["config"] == config or split_item["split"] == split - for split_item in result["response"]["splits"] - ): - # The splits is listed in the /splits response. - # Let's refresh *the whole dataset*, because something did not work - # Note that we "force" the refresh - # - # Caveat: we don't check if the /first-rows response already exists in the cache, - # because we assume it's the reason why one would call this function - update(dataset=dataset, force=True) - return True + result = get_response(kind=CacheKind.SPLITS.value, dataset=dataset) @@ -107,3 +95,18 @@ def is_first_rows_in_process( - # the splits responses does not exist, let's check if it should - return is_splits_in_process(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) - return False + # the /splits response does not exist, update + return update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) + + if result["http_status"] == HTTPStatus.OK and any( + split_item["dataset"] == dataset or split_item["config"] == config or split_item["split"] == split + for split_item in result["content"]["splits"] + ): + # The split is listed in the /splits response.
+ # Let's refresh *the whole dataset*, because something did not work + # Note that we "force" the refresh + # + # Caveat: we don't check if the /first-rows response already exists in the cache, + # because we assume it's the reason why one would call this function + return update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, force=True) + else: + # the /splits response is an error, or the split is not listed in the /splits response, so it's normal + # that it's not in the cache + return False diff --git a/services/api/src/api/routes/first_rows.py b/services/api/src/api/routes/first_rows.py index cd9abaef..48a04938 100644 --- a/services/api/src/api/routes/first_rows.py +++ b/services/api/src/api/routes/first_rows.py @@ -8 +8 @@ from typing import Optional -from libcache.simple_cache import DoesNotExist, get_first_rows_response +from libcache.simple_cache import DoesNotExist, get_response @@ -15,0 +16 @@ from api.utils import ( + CacheKind, @@ -47,2 +48,2 @@ def create_first_rows_endpoint( - result = get_first_rows_response(dataset, config, split) - response = result["response"] + result = get_response(kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split) + content = result["content"] @@ -52 +53 @@ def create_first_rows_endpoint( - return get_json_ok_response(content=response, max_age=max_age_long) + return get_json_ok_response(content=content, max_age=max_age_long) @@ -55 +56 @@ def create_first_rows_endpoint( - content=response, status_code=http_status, max_age=max_age_short, error_code=error_code + content=content, status_code=http_status, max_age=max_age_short, error_code=error_code diff --git a/services/api/src/api/routes/splits.py b/services/api/src/api/routes/splits.py index 0d40b69d..d3c7e835 100644 --- a/services/api/src/api/routes/splits.py +++ b/services/api/src/api/routes/splits.py @@ -8 +8 @@ from typing import Optional -from libcache.simple_cache import DoesNotExist, get_splits_response +from libcache.simple_cache import DoesNotExist, get_response @@ -15,0 +16 @@ from api.utils import ( + CacheKind, @@ -45,2 +46,2 @@ def create_splits_endpoint( - result = get_splits_response(dataset) - response = result["response"] + result = get_response(kind=CacheKind.SPLITS.value, dataset=dataset) + content = result["content"] @@ -50 +51 @@ def create_splits_endpoint( - return get_json_ok_response(content=response, max_age=max_age_long) + return get_json_ok_response(content=content, max_age=max_age_long) @@ -53 +54 @@ def create_splits_endpoint( - content=response, status_code=http_status, max_age=max_age_short, error_code=error_code + content=content, status_code=http_status, max_age=max_age_short, error_code=error_code diff --git a/services/api/src/api/routes/valid.py b/services/api/src/api/routes/valid.py index 1745e323..856512ca 100644 --- a/services/api/src/api/routes/valid.py +++ b/services/api/src/api/routes/valid.py @@ -5 +5 @@ import logging -from typing import Optional +from typing import List, Optional @@ -7 +7 @@ from typing import Optional -from libcache.simple_cache import get_valid_dataset_names, is_dataset_name_valid +from libcache.simple_cache import get_valid_datasets, get_validity_by_kind @@ -13,0 +14 @@ from api.utils import ( + CacheKind, @@ -22,0 +24,23 @@ from api.utils import ( +def get_valid() -> List[str]: + # a dataset is considered valid if: + # - the /splits response is valid + datasets = get_valid_datasets(kind=CacheKind.SPLITS.value) + # - at least one of the /first-rows responses is valid + 
datasets.intersection_update(get_valid_datasets(kind=CacheKind.FIRST_ROWS.value)) + # note that the list is sorted alphabetically for consistency + return sorted(datasets) + + +def is_valid(dataset: str) -> bool: + # a dataset is considered valid if: + # - the /splits response is valid + # - at least one of the /first-rows responses is valid + validity_by_kind = get_validity_by_kind(dataset=dataset) + return ( + CacheKind.SPLITS.value in validity_by_kind + and validity_by_kind[CacheKind.SPLITS.value] + and CacheKind.FIRST_ROWS.value in validity_by_kind + and validity_by_kind[CacheKind.FIRST_ROWS.value] + ) + + @@ -26 +50 @@ async def valid_endpoint(_: Request) -> Response: - content = {"valid": get_valid_dataset_names()} + content = {"valid": get_valid()} @@ -37,3 +61,3 @@ def create_is_valid_endpoint( - dataset_name = request.query_params.get("dataset") - logging.info(f"/is-valid, dataset={dataset_name}") - if not are_valid_parameters([dataset_name]): + dataset = request.query_params.get("dataset") + logging.info(f"/is-valid, dataset={dataset}") + if not are_valid_parameters([dataset]): @@ -42 +66 @@ def create_is_valid_endpoint( - auth_check(dataset_name, external_auth_url=external_auth_url, request=request) + auth_check(dataset, external_auth_url=external_auth_url, request=request) @@ -44 +68 @@ def create_is_valid_endpoint( - "valid": is_dataset_name_valid(dataset_name), + "valid": is_valid(dataset), diff --git a/services/api/src/api/routes/webhook.py b/services/api/src/api/routes/webhook.py index 628d4d57..829119e3 100644 --- a/services/api/src/api/routes/webhook.py +++ b/services/api/src/api/routes/webhook.py @@ -11 +11 @@ from starlette.responses import Response -from api.dataset import delete, is_supported, update +from api.dataset import delete, move, update @@ -65,2 +65 @@ def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: O - if is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): - update(dataset=dataset, force=False) + update(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token, force=False) @@ -73,4 +72 @@ def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: O - if is_supported(dataset=moved_to, hf_endpoint=hf_endpoint, hf_token=hf_token): - # not optimal as we might try to rename instead - update(dataset=moved_to, force=False) - delete(dataset=dataset) + move(from_dataset=dataset, to_dataset=moved_to, hf_endpoint=hf_endpoint, hf_token=hf_token, force=False) diff --git a/services/api/src/api/utils.py b/services/api/src/api/utils.py index c8cab845..f0014bd2 100644 --- a/services/api/src/api/utils.py +++ b/services/api/src/api/utils.py @@ -146,0 +147,5 @@ class JobType(Enum): + + +class CacheKind(Enum): + SPLITS = "/splits" + FIRST_ROWS = "/first-rows" diff --git a/services/api/tests/routes/__init__.py b/services/api/tests/routes/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/api/tests/routes/test_valid.py b/services/api/tests/routes/test_valid.py new file mode 100644 index 00000000..1d846bf9 --- /dev/null +++ b/services/api/tests/routes/test_valid.py @@ -0,0 +1,122 @@ +from http import HTTPStatus + +import pytest +from libcache.simple_cache import _clean_cache_database, upsert_response + +from api.routes.valid import get_valid, is_valid +from api.utils import CacheKind + + [email protected](autouse=True) +def clean_mongo_databases() -> None: + _clean_cache_database() + + +def test_empty() -> None: + assert get_valid() == [] + assert is_valid("dataset") is False + 
+ +def test_only_splits() -> None: + dataset = "dataset" + upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) + assert get_valid() == [] + assert is_valid("dataset") is False + + +def test_only_first_rows() -> None: + dataset = "dataset" + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config="config", + split="split", + content={}, + http_status=HTTPStatus.OK, + ) + assert get_valid() == [] + assert is_valid("dataset") is False + + +def test_splits_and_first_rows_ok() -> None: + dataset = "dataset" + upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config="config", + split="split", + content={}, + http_status=HTTPStatus.OK, + ) + assert get_valid() == [dataset] + assert is_valid("dataset") is True + + +def test_splits_and_first_rows_ok_and_error() -> None: + dataset = "dataset" + upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config="config", + split="split_a", + content={}, + http_status=HTTPStatus.OK, + ) + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config="config", + split="split_b", + content={}, + http_status=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + assert get_valid() == [dataset] + assert is_valid("dataset") is True + + +def test_splits_and_first_rows_only_errors() -> None: + dataset = "dataset" + upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset, content={}, http_status=HTTPStatus.OK) + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config="config", + split="split", + content={}, + http_status=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + assert get_valid() == [] + assert is_valid("dataset") is False + + +def test_valid_datasets() -> None: + dataset_a = "dataset_a" + dataset_b = "dataset_b" + dataset_c = "dataset_c" + upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset_a, content={}, http_status=HTTPStatus.OK) + upsert_response(kind=CacheKind.SPLITS.value, dataset=dataset_b, content={}, http_status=HTTPStatus.OK) + upsert_response( + kind=CacheKind.SPLITS.value, dataset=dataset_c, content={}, http_status=HTTPStatus.INTERNAL_SERVER_ERROR + ) + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset_a, + config="config", + split="split", + content={}, + http_status=HTTPStatus.OK, + ) + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset_b, + config="config", + split="split", + content={}, + http_status=HTTPStatus.OK, + ) + assert get_valid() == [dataset_a, dataset_b] + assert is_valid(dataset_a) is True + assert is_valid(dataset_b) is True + assert is_valid(dataset_c) is False diff --git a/services/api/tests/test_app.py b/services/api/tests/test_app.py index 49d41b96..1e6a17c3 100644 --- a/services/api/tests/test_app.py +++ b/services/api/tests/test_app.py @@ -9,2 +9 @@ import pytest -from libcache.simple_cache import _clean_database as _clean_cache_database -from libcache.simple_cache import upsert_first_rows_response, upsert_splits_response +from libcache.simple_cache import _clean_cache_database, upsert_response @@ -16 +15 @@ from api.app import create_app -from api.utils import JobType +from api.utils import CacheKind, JobType @@ -185 +184,3 @@ def test_splits_cache_refreshing( - upsert_splits_response(dataset, {"key": "value"}, HTTPStatus.OK) + 
upsert_response( + kind=CacheKind.SPLITS.value, dataset=dataset, content={"key": "value"}, http_status=HTTPStatus.OK + ) @@ -224 +225,8 @@ def test_first_rows_cache_refreshing( - upsert_first_rows_response(dataset, config, split, {"key": "value"}, HTTPStatus.OK) + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config=config, + split=split, + content={"key": "value"}, + http_status=HTTPStatus.OK, + ) diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index a08b28cc..a92b0be1 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -31 +31 @@ name = "aiosignal" -version = "1.2.0" +version = "1.3.1" @@ -35 +35 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -280 +280 @@ name = "colorama" -version = "0.4.5" +version = "0.4.6" @@ -284 +284 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" @@ -351 +351 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -357 +357 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", 
"requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -455 +455 @@ name = "fastavro" -version = "1.6.1" +version = "1.7.0" @@ -494 +494 @@ name = "flatbuffers" -version = "22.9.24" +version = "22.10.26" @@ -502 +502 @@ name = "frozenlist" -version = "1.3.1" +version = "1.3.3" @@ -590 +590 @@ name = "google-auth" -version = "2.13.0" +version = "2.14.1" @@ -605 +605 @@ enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["pyopenssl (>=20.0.0)"] +pyopenssl = ["pyopenssl (>=20.0.0)", "cryptography (>=38.0.3)"] @@ -827 +827 @@ name = "libcache" -version = "0.3.4" +version = "0.4.0" @@ -842 +842 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl" @@ -973 +973 @@ name = "marshmallow" -version = "3.18.0" +version = "3.19.0" @@ -983,3 +983,3 @@ packaging = ">=17.0" -dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] -docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] -lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)"] @@ -1123 +1123 @@ name = "numba" -version = "0.56.3" +version = "0.56.4" @@ -1182 +1182 @@ name = "orjson" -version = "3.8.0" +version = "3.8.1" @@ -1217 +1217 @@ name = "pathspec" -version = "0.10.1" +version = "0.10.2" @@ -1245,2 +1245,2 @@ name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.5.4" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
@@ -1252,2 +1252,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] @@ -1449 +1449 @@ name = "pyicu" -version = "2.9" +version = "2.10.2" @@ -1457 +1457 @@ name = "pymongo" -version = "3.12.3" +version = "3.13.0" @@ -1569 +1569 @@ name = "pytz" -version = "2022.5" +version = "2022.6" @@ -1601 +1601 @@ name = "regex" -version = "2022.9.13" +version = "2022.10.31" @@ -1728 +1728 @@ name = "scikit-learn" -version = "1.1.2" +version = "1.1.3" @@ -1772,2 +1772,2 @@ name = "sklearn" -version = "0.0" -description = "A set of python modules for machine learning and data mining" +version = "0.0.post1" +description = "deprecated sklearn package, use scikit-learn instead" @@ -1778,3 +1777,0 @@ python-versions = "*" -[package.dependencies] -scikit-learn = "*" - @@ -1813 +1810 @@ name = "stevedore" -version = "4.1.0" +version = "4.1.1" @@ -1861 +1858 @@ name = "tensorflow" -version = "2.10.0" +version = "2.10.1" @@ -1945 +1942 @@ name = "termcolor" -version = "2.0.1" +version = "2.1.0" @@ -1952 +1949 @@ python-versions = ">=3.7" -tests = ["pytest-cov", "pytest"] +tests = ["pytest", "pytest-cov"] @@ -1985 +1982 @@ name = "tokenizers" -version = "0.13.1" +version = "0.13.2" @@ -1992 +1989 @@ python-versions = "*" -dev = ["pytest", "requests", "numpy", "datasets"] +dev = ["pytest", "requests", "numpy", "datasets", "black (==22.3)"] @@ -1994 +1991 @@ docs = ["sphinx", "sphinx-rtd-theme", "setuptools-rust"] -testing = ["pytest", "requests", "numpy", "datasets"] +testing = ["pytest", "requests", "numpy", "datasets", "black (==22.3)"] @@ -2061 +2058 @@ name = "transformers" -version = "4.23.1" +version = "4.24.0" @@ -2080,2 +2077,2 @@ accelerate = ["accelerate (>=0.10.0)"] -all = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)"] -audio = ["librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +all = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)"] +audio = ["librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2085,4 +2082,5 @@ deepspeed-testing = ["deepspeed (>=0.6.5)", "accelerate (>=0.10.0)", "pytest", " -dev = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", 
"torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", "pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn"] -dev-tensorflow = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "pillow", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] -dev-torch = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "torch (>=1.7,!=1.12.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -docs = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", "hf-doc-builder"] +dev = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", 
"pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn"] +dev-tensorflow = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "pillow", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] +testing = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)"] +dev-torch = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "torch (>=1.7,!=1.12.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +docs = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", "hf-doc-builder"] @@ -2092 +2090 @@ flax = ["jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0 -flax-speech = ["librosa", "pyctcdecode (>=0.3.0)", 
"phonemizer"] +flax-speech = ["librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2108,2 +2106 @@ sklearn = ["scikit-learn"] -speech = ["torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] -testing = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)"] +speech = ["torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2112 +2109 @@ tf-cpu = ["tensorflow-cpu (>=2.3)", "onnxconverter-common", "tf2onnx", "tensorfl -tf-speech = ["librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +tf-speech = ["librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2116 +2113 @@ torch = ["torch (>=1.7,!=1.12.0)"] -torch-speech = ["torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +torch-speech = ["torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2164 +2161 @@ name = "types-requests" -version = "2.28.11.2" +version = "2.28.11.5" @@ -2175 +2172 @@ name = "types-urllib3" -version = "1.26.25.1" +version = "1.26.25.4" @@ -2262 +2259 @@ name = "zipp" -version = "3.9.0" +version = "3.10.0" @@ -2274 +2271 @@ name = "zstandard" -version = "0.18.0" +version = "0.19.0" @@ -2289 +2286 @@ python-versions = "3.9.6" -content-hash = "14a3bc11393fb1360f9ce68375795fdc65008e4815d7a7e620bf26fff7ab009b" +content-hash = "eb01a846e042f679dcf4bd26176654610b489950f9d6c442347bf814080b8da2" @@ -2294,4 +2291 @@ aiohttp = [] -aiosignal = [ - {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, - {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, -] +aiosignal = [] @@ -2436,4 +2430 @@ cloudpickle = [] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] +colorama = [] @@ -2619 +2610 @@ libcache = [ - {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, + {file = "libcache-0.4.0-py3-none-any.whl", hash = "sha256:fd89c5935b219a67783283f35611f61d983b6df8dc79d687c470b3fb9754741d"}, @@ -2796,4 +2787 @@ pillow = [] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] +platformdirs = [] @@ -2903,112 +2891,2 @@ pyflakes = [ -pyicu = [ - {file = "PyICU-2.9.tar.gz", hash = "sha256:3c29d6ce65546157117a1a347a303ecdfcf1a7591ed679fc88cdef4108845878"}, -] -pymongo = [ - {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:8c7ad5cab282f53b9d78d51504330d1c88c83fbe187e472c07e6908a0293142e"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a766157b195a897c64945d4ff87b050bb0e763bb78f3964e996378621c703b00"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8d6bf6fcd42cde2f02efb8126812a010c297eacefcd090a609639d2aeda6185"}, - {file = "pymongo-3.12.3-cp27-cp27m-win32.whl", hash = "sha256:5fdffb0cfeb4dc8646a5381d32ec981ae8472f29c695bf09e8f7a8edb2db12ca"}, - {file = "pymongo-3.12.3-cp27-cp27m-win_amd64.whl", hash = "sha256:648fcfd8e019b122b7be0e26830a3a2224d57c3e934f19c1e53a77b8380e6675"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3f0ac6e0203bd88863649e6ed9c7cfe53afab304bc8225f2597c4c0a74e4d1f0"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:71c0db2c313ea8a80825fb61b7826b8015874aec29ee6364ade5cb774fe4511b"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b779e87300635b8075e8d5cfd4fdf7f46078cd7610c381d956bca5556bb8f97"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:351a2efe1c9566c348ad0076f4bf541f4905a0ebe2d271f112f60852575f3c16"}, - {file = "pymongo-3.12.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a02313e71b7c370c43056f6b16c45effbb2d29a44d24403a3d5ba6ed322fa3f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:d3082e5c4d7b388792124f5e805b469109e58f1ab1eb1fbd8b998e8ab766ffb7"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:514e78d20d8382d5b97f32b20c83d1d0452c302c9a135f0a9022236eb9940fda"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:b1b5be40ebf52c3c67ee547e2c4435ed5bc6352f38d23e394520b686641a6be4"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:58db209da08a502ce6948841d522dcec80921d714024354153d00b054571993c"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:5296e5e69243ffd76bd919854c4da6630ae52e46175c804bc4c0e050d937b705"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:51d1d061df3995c2332ae78f036492cc188cb3da8ef122caeab3631a67bb477e"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b974b7f49d65a16ca1435bc1c25a681bb7d630509dd23b2e819ed36da0b7f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e099b79ccf7c40f18b149a64d3d10639980035f9ceb223169dd806ff1bb0d9cc"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27e5ea64332385385b75414888ce9d1a9806be8616d7cef4ef409f4f256c6d06"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed7d11330e443aeecab23866055e08a5a536c95d2c25333aeb441af2dbac38d2"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93111fd4e08fa889c126aa8baf5c009a941880a539c87672e04583286517450a"}, - {file = "pymongo-3.12.3-cp310-cp310-win32.whl", hash = "sha256:2301051701b27aff2cbdf83fae22b7ca883c9563dfd088033267291b46196643"}, - {file = "pymongo-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:c7e8221278e5f9e2b6d3893cfc3a3e46c017161a57bb0e6f244826e4cee97916"}, - {file = "pymongo-3.12.3-cp34-cp34m-macosx_10_6_intel.whl", hash = 
"sha256:7b4a9fcd95e978cd3c96cdc2096aa54705266551422cf0883c12a4044def31c6"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:06b64cdf5121f86b78a84e61b8f899b6988732a8d304b503ea1f94a676221c06"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:c8f7dd025cb0bf19e2f60a64dfc24b513c8330e0cfe4a34ccf941eafd6194d9e"}, - {file = "pymongo-3.12.3-cp34-cp34m-win32.whl", hash = "sha256:ab23b0545ec71ea346bf50a5d376d674f56205b729980eaa62cdb7871805014b"}, - {file = "pymongo-3.12.3-cp34-cp34m-win_amd64.whl", hash = "sha256:1b5cb75d2642ff7db823f509641f143f752c0d1ab03166cafea1e42e50469834"}, - {file = "pymongo-3.12.3-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:fc2048d13ff427605fea328cbe5369dce549b8c7657b0e22051a5b8831170af6"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c5f83bb59d0ff60c6fdb1f8a7b0288fbc4640b1f0fd56f5ae2387749c35d34e3"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6632b1c63d58cddc72f43ab9f17267354ddce563dd5e11eadabd222dcc808808"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fedad05147b40ff8a93fcd016c421e6c159f149a2a481cfa0b94bfa3e473bab"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:208a61db8b8b647fb5b1ff3b52b4ed6dbced01eac3b61009958adb203596ee99"}, - {file = "pymongo-3.12.3-cp35-cp35m-win32.whl", hash = "sha256:3100a2352bdded6232b385ceda0c0a4624598c517d52c2d8cf014b7abbebd84d"}, - {file = "pymongo-3.12.3-cp35-cp35m-win_amd64.whl", hash = "sha256:3492ae1f97209c66af70e863e6420e6301cecb0a51a5efa701058aa73a8ca29e"}, - {file = "pymongo-3.12.3-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:87e18f29bac4a6be76a30e74de9c9005475e27100acf0830679420ce1fd9a6fd"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b3e08aef4ea05afbc0a70cd23c13684e7f5e074f02450964ec5cfa1c759d33d2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e66b3c9f8b89d4fd58a59c04fdbf10602a17c914fbaaa5e6ea593f1d54b06362"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5d67dbc8da2dac1644d71c1839d12d12aa333e266a9964d5b1a49feed036bc94"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:a351986d6c9006308f163c359ced40f80b6cffb42069f3e569b979829951038d"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5296669bff390135528001b4e48d33a7acaffcd361d98659628ece7f282f11aa"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:9d5b66d457d2c5739c184a777455c8fde7ab3600a56d8bbebecf64f7c55169e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:1c771f1a8b3cd2d697baaf57e9cfa4ae42371cacfbea42ea01d9577c06d92f96"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81a3ebc33b1367f301d1c8eda57eec4868e951504986d5d3fe437479dcdac5b2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cf113a46d81cff0559d57aa66ffa473d57d1a9496f97426318b6b5b14fdec1c"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64b9122be1c404ce4eb367ad609b590394587a676d84bfed8e03c3ce76d70560"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c6c71e198b36f0f0dfe354f06d3655ecfa30d69493a1da125a9a54668aad652"}, - {file = 
"pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33ab8c031f788609924e329003088831045f683931932a52a361d4a955b7dce2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2b4c95c47fb81b19ea77dc1c50d23af3eba87c9628fcc2e03d44124a3d336ea"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4e0a3ea7fd01cf0a36509f320226bd8491e0f448f00b8cb89f601c109f6874e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-win32.whl", hash = "sha256:dfec57f15f53d677b8e4535695ff3f37df7f8fe431f2efa8c3c8c4025b53d1eb"}, - {file = "pymongo-3.12.3-cp36-cp36m-win_amd64.whl", hash = "sha256:c22591cff80188dd8543be0b559d0c807f7288bd353dc0bcfe539b4588b3a5cd"}, - {file = "pymongo-3.12.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:7738147cd9dbd6d18d5593b3491b4620e13b61de975fd737283e4ad6c255c273"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:be1f10145f7ea76e3e836fdc5c8429c605675bdcddb0bca9725ee6e26874c00c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:295a5beaecb7bf054c1c6a28749ed72b19f4d4b61edcd8a0815d892424baf780"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:320f8734553c50cffe8a8e1ae36dfc7d7be1941c047489db20a814d2a170d7b5"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:5d20072d81cbfdd8e15e6a0c91fc7e3a4948c71e0adebfc67d3b4bcbe8602711"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:2c46a0afef69d61938a6fe32c3afd75b91dec3ab3056085dc72abbeedcc94166"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:5f530f35e1a57d4360eddcbed6945aecdaee2a491cd3f17025e7b5f2eea88ee7"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:6526933760ee1e6090db808f1690a111ec409699c1990efc96f134d26925c37f"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95d15cf81cd2fb926f2a6151a9f94c7aacc102b415e72bc0e040e29332b6731c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d52a70350ec3dfc39b513df12b03b7f4c8f8ec6873bbf958299999db7b05eb1"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9252c991e8176b5a2fa574c5ab9a841679e315f6e576eb7cf0bd958f3e39b0ad"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:145d78c345a38011497e55aff22c0f8edd40ee676a6810f7e69563d68a125e83"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8e0a086dbbee406cc6f603931dfe54d1cb2fba585758e06a2de01037784b737"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6d5443104f89a840250087863c91484a72f254574848e951d1bdd7d8b2ce7c9"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6f93dbfa5a461107bc3f5026e0d5180499e13379e9404f07a9f79eb5e9e1303d"}, - {file = "pymongo-3.12.3-cp37-cp37m-win32.whl", hash = "sha256:c9d212e2af72d5c8d082775a43eb726520e95bf1c84826440f74225843975136"}, - {file = "pymongo-3.12.3-cp37-cp37m-win_amd64.whl", hash = "sha256:320a1fe403dd83a35709fcf01083d14bc1462e9789b711201349a9158db3a87e"}, - {file = "pymongo-3.12.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:a1ba93be779a9b8e5e44f5c133dc1db4313661cead8a2fd27661e6cb8d942ee9"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4294f2c1cd069b793e31c2e6d7ac44b121cf7cedccd03ebcc30f3fc3417b314a"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:845b178bd127bb074835d2eac635b980c58ec5e700ebadc8355062df708d5a71"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:176fdca18391e1206c32fb1d8265628a84d28333c20ad19468d91e3e98312cd1"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:28bfd5244d32faf3e49b5a8d1fab0631e922c26e8add089312e4be19fb05af50"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:f38b35ecd2628bf0267761ed659e48af7e620a7fcccfccf5774e7308fb18325c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:cebb3d8bcac4a6b48be65ebbc5c9881ed4a738e27bb96c86d9d7580a1fb09e05"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:80710d7591d579442c67a3bc7ae9dcba9ff95ea8414ac98001198d894fc4ff46"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89d7baa847383b9814de640c6f1a8553d125ec65e2761ad146ea2e75a7ad197c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:602284e652bb56ca8760f8e88a5280636c5b63d7946fca1c2fe0f83c37dffc64"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2d763d05ec7211313a06e8571236017d3e61d5fef97fcf34ec4b36c0b6556"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6e4dccae8ef5dd76052647d78f02d5d0ffaff1856277d951666c54aeba3ad2"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1fc4d3985868860b6585376e511bb32403c5ffb58b0ed913496c27fd791deea"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4e5d163e6644c2bc84dd9f67bfa89288c23af26983d08fefcc2cbc22f6e57e6"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8d92c6bb9174d47c2257528f64645a00bbc6324a9ff45a626192797aff01dc14"}, - {file = "pymongo-3.12.3-cp38-cp38-win32.whl", hash = "sha256:b0db9a4691074c347f5d7ee830ab3529bc5ad860939de21c1f9c403daf1eda9a"}, - {file = "pymongo-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:d81047341ab56061aa4b6823c54d4632579c3b16e675089e8f520e9b918a133b"}, - {file = "pymongo-3.12.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07398d8a03545b98282f459f2603a6bb271f4448d484ed7f411121a519a7ea48"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b7df0d99e189b7027d417d4bfd9b8c53c9c7ed5a0a1495d26a6f547d820eca88"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a283425e6a474facd73072d8968812d1d9058490a5781e022ccf8895500b83ce"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2577b8161eeae4dd376d13100b2137d883c10bb457dd08935f60c9f9d4b5c5f6"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:517b09b1dd842390a965a896d1327c55dfe78199c9f5840595d40facbcd81854"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2567885ff0c8c7c0887ba6cefe4ae4af96364a66a7069f924ce0cd12eb971d04"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_s390x.whl", hash = 
"sha256:71c5c200fd37a5322706080b09c3ec8907cf01c377a7187f354fc9e9e13abc73"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:14dee106a10b77224bba5efeeb6aee025aabe88eb87a2b850c46d3ee55bdab4a"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f340a2a908644ea6cccd399be0fb308c66e05d2800107345f9f0f0d59e1731c4"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b4c535f524c9d8c86c3afd71d199025daa070859a2bdaf94a298120b0de16db"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8455176fd1b86de97d859fed4ae0ef867bf998581f584c7a1a591246dfec330f"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf254a1a95e95fdf4eaa25faa1ea450a6533ed7a997f9f8e49ab971b61ea514d"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a3540e21213cb8ce232e68a7d0ee49cdd35194856c50b8bd87eeb572fadd42"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0e7a5d0b9077e8c3e57727f797ee8adf12e1d5e7534642230d98980d160d1320"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0be605bfb8461384a4cb81e80f51eb5ca1b89851f2d0e69a75458c788a7263a4"}, - {file = "pymongo-3.12.3-cp39-cp39-win32.whl", hash = "sha256:2157d68f85c28688e8b723bbe70c8013e0aba5570e08c48b3562f74d33fc05c4"}, - {file = "pymongo-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfa217bf8cf3ff6b30c8e6a89014e0c0e7b50941af787b970060ae5ba04a4ce5"}, - {file = "pymongo-3.12.3-py2.7-macosx-10.14-intel.egg", hash = "sha256:d81299f63dc33cc172c26faf59cc54dd795fc6dd5821a7676cca112a5ee8bbd6"}, - {file = "pymongo-3.12.3.tar.gz", hash = "sha256:0a89cadc0062a5e53664dde043f6c097172b8c1c5f0094490095282ff9995a5f"}, -] +pyicu = [] +pymongo = [] @@ -3100,3 +2978 @@ six = [ -sklearn = [ - {file = "sklearn-0.0.tar.gz", hash = "sha256:e23001573aa194b834122d2b9562459bf5ae494a2d59ca6b8aa22c85a44c0e31"}, -] +sklearn = [] diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 05395184..4b82bd4c 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/src/first_rows/utils.py b/workers/first_rows/src/first_rows/utils.py index a6bf597c..202b9ea5 100644 --- a/workers/first_rows/src/first_rows/utils.py +++ b/workers/first_rows/src/first_rows/utils.py @@ -159,0 +160,5 @@ class Queues: + + +class CacheKind(Enum): + SPLITS = "/splits" + FIRST_ROWS = "/first-rows" diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/first_rows/src/first_rows/worker.py index bff0214c..bed2925e 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/first_rows/src/first_rows/worker.py @@ -9 +9 @@ from typing import Optional -from libcache.simple_cache import get_first_rows_response, upsert_first_rows_response +from libcache.simple_cache import get_response_without_content, upsert_response @@ -14,0 +15 @@ from first_rows.utils import ( + CacheKind, @@ -60 +61,3 @@ class FirstRowsWorker(Worker): - cache_entry = get_first_rows_response(dataset_name=dataset, config_name=config, 
split_name=split) + cached_response = get_response_without_content( + kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split + ) @@ -65,5 +68,6 @@ class FirstRowsWorker(Worker): - cache_entry["http_status"] == HTTPStatus.OK - and cache_entry["worker_version"] is not None - and self.compare_major_version(cache_entry["worker_version"]) == 0 - and cache_entry["dataset_git_revision"] is not None - and cache_entry["dataset_git_revision"] == dataset_git_revision + # TODO: use "error_code" to decide if the job should be skipped (ex: retry if temporary error) + cached_response["http_status"] == HTTPStatus.OK + and cached_response["worker_version"] is not None + and self.compare_major_version(cached_response["worker_version"]) == 0 + and cached_response["dataset_git_revision"] is not None + and cached_response["dataset_git_revision"] == dataset_git_revision @@ -98,5 +102,6 @@ class FirstRowsWorker(Worker): - upsert_first_rows_response( - dataset_name=dataset, - config_name=config, - split_name=split, - response=dict(result["first_rows_response"]), + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config=config, + split=split, + content=dict(result["first_rows_response"]), @@ -115,5 +120,6 @@ class FirstRowsWorker(Worker): - upsert_first_rows_response( - dataset_name=dataset, - config_name=config, - split_name=split, - response=dict(err.as_response()), + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config=config, + split=split, + content=dict(err.as_response()), @@ -130,5 +136,6 @@ class FirstRowsWorker(Worker): - upsert_first_rows_response( - dataset_name=dataset, - config_name=config, - split_name=split, - response=dict(e.as_response()), + upsert_response( + kind=CacheKind.FIRST_ROWS.value, + dataset=dataset, + config=config, + split=split, + content=dict(e.as_response()), diff --git a/workers/first_rows/tests/test_worker.py b/workers/first_rows/tests/test_worker.py index 78475bfb..0dd989d5 100644 --- a/workers/first_rows/tests/test_worker.py +++ b/workers/first_rows/tests/test_worker.py @@ -8,3 +8 @@ import pytest -from libcache.simple_cache import DoesNotExist -from libcache.simple_cache import _clean_database as _clean_cache_database -from libcache.simple_cache import get_first_rows_response +from libcache.simple_cache import DoesNotExist, _clean_cache_database, get_response @@ -13,0 +12 @@ from first_rows.config import WorkerConfig +from first_rows.utils import CacheKind @@ -42,12 +41,12 @@ def test_compute(worker: FirstRowsWorker, hub_public_csv: str) -> None: - cache_entry = get_first_rows_response(dataset_name=dataset, config_name=config, split_name=split) - assert cache_entry["http_status"] == HTTPStatus.OK - assert cache_entry["error_code"] is None - assert cache_entry["worker_version"] == worker.version - assert cache_entry["dataset_git_revision"] is not None - response = cache_entry["response"] - assert response["features"][0]["feature_idx"] == 0 - assert response["features"][0]["name"] == "col_1" - assert response["features"][0]["type"]["_type"] == "Value" - assert response["features"][0]["type"]["dtype"] == "int64" # <---| - assert response["features"][1]["type"]["dtype"] == "int64" # <---|- auto-detected by the datasets library - assert response["features"][2]["type"]["dtype"] == "float64" # <-| + cached_response = get_response(kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + 
assert cached_response["worker_version"] == worker.version + assert cached_response["dataset_git_revision"] is not None + content = cached_response["content"] + assert content["features"][0]["feature_idx"] == 0 + assert content["features"][0]["name"] == "col_1" + assert content["features"][0]["type"]["_type"] == "Value" + assert content["features"][0]["type"]["dtype"] == "int64" # <---| + assert content["features"][1]["type"]["dtype"] == "int64" # <---|- auto-detected by the datasets library + assert content["features"][2]["type"]["dtype"] == "float64" # <-| @@ -61 +60 @@ def test_doesnotexist(worker: FirstRowsWorker) -> None: - get_first_rows_response(dataset_name=dataset, config_name=config, split_name=split) + get_response(kind=CacheKind.FIRST_ROWS.value, dataset=dataset, config=config, split=split) diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 389a4415..5d58b35d 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -31 +31 @@ name = "aiosignal" -version = "1.2.0" +version = "1.3.1" @@ -35 +35 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -280 +280 @@ name = "colorama" -version = "0.4.5" +version = "0.4.6" @@ -284 +284 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" @@ -455 +455 @@ name = "fastavro" -version = "1.6.1" +version = "1.7.0" @@ -494 +494 @@ name = "flatbuffers" -version = "22.9.24" +version = "22.10.26" @@ -502 +502 @@ name = "frozenlist" -version = "1.3.1" +version = "1.3.3" @@ -590 +590 @@ name = "google-auth" -version = "2.13.0" +version = "2.14.1" @@ -605 +605 @@ enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["pyopenssl (>=20.0.0)"] +pyopenssl = ["pyopenssl (>=20.0.0)", "cryptography (>=38.0.3)"] @@ -827 +827 @@ name = "libcache" -version = "0.3.4" +version = "0.4.0" @@ -842 +842 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl" @@ -973 +973 @@ name = "marshmallow" -version = "3.18.0" +version = "3.19.0" @@ -983,3 +983,3 @@ packaging = ">=17.0" -dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] -docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] -lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)"] @@ -1123 +1123 @@ name = "numba" -version = "0.56.3" +version = "0.56.4" @@ -1182 +1182 @@ name = "orjson" -version = "3.8.0" +version = "3.8.1" @@ -1217 +1217 @@ name = "pathspec" -version = "0.10.1" +version = "0.10.2" @@ -1245,2 +1245,2 @@ name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.5.4" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
@@ -1252,2 +1252,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] @@ -1449 +1449 @@ name = "pyicu" -version = "2.9" +version = "2.10.2" @@ -1457 +1457 @@ name = "pymongo" -version = "3.12.3" +version = "3.13.0" @@ -1569 +1569 @@ name = "pytz" -version = "2022.5" +version = "2022.6" @@ -1601 +1601 @@ name = "regex" -version = "2022.9.13" +version = "2022.10.31" @@ -1728 +1728 @@ name = "scikit-learn" -version = "1.1.2" +version = "1.1.3" @@ -1772,2 +1772,2 @@ name = "sklearn" -version = "0.0" -description = "A set of python modules for machine learning and data mining" +version = "0.0.post1" +description = "deprecated sklearn package, use scikit-learn instead" @@ -1778,3 +1777,0 @@ python-versions = "*" -[package.dependencies] -scikit-learn = "*" - @@ -1813 +1810 @@ name = "stevedore" -version = "4.1.0" +version = "4.1.1" @@ -1861 +1858 @@ name = "tensorflow" -version = "2.10.0" +version = "2.10.1" @@ -1945 +1942 @@ name = "termcolor" -version = "2.0.1" +version = "2.1.0" @@ -1952 +1949 @@ python-versions = ">=3.7" -tests = ["pytest-cov", "pytest"] +tests = ["pytest", "pytest-cov"] @@ -1985 +1982 @@ name = "tokenizers" -version = "0.13.1" +version = "0.13.2" @@ -1992 +1989 @@ python-versions = "*" -dev = ["pytest", "requests", "numpy", "datasets"] +dev = ["pytest", "requests", "numpy", "datasets", "black (==22.3)"] @@ -1994 +1991 @@ docs = ["sphinx", "sphinx-rtd-theme", "setuptools-rust"] -testing = ["pytest", "requests", "numpy", "datasets"] +testing = ["pytest", "requests", "numpy", "datasets", "black (==22.3)"] @@ -2061 +2058 @@ name = "transformers" -version = "4.23.1" +version = "4.24.0" @@ -2080,2 +2077,2 @@ accelerate = ["accelerate (>=0.10.0)"] -all = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)"] -audio = ["librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +all = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)"] +audio = ["librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2085,4 +2082,5 @@ deepspeed-testing = ["deepspeed (>=0.6.5)", "accelerate (>=0.10.0)", "pytest", " -dev = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", 
"torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", "pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn"] -dev-tensorflow = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "pillow", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] -dev-torch = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "torch (>=1.7,!=1.12.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -docs = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", "hf-doc-builder"] +dev = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", 
"pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn"] +dev-tensorflow = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "pillow", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] +testing = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)"] +dev-torch = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)", "torch (>=1.7,!=1.12.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "sudachipy (>=0.6.6)", "sudachidict-core (>=20220729)", "pyknp (>=0.6.1)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +docs = ["tensorflow (>=2.4)", "onnxconverter-common", "tf2onnx", "tensorflow-text", "torch (>=1.7,!=1.12.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.4.1)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.2)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "accelerate (>=0.10.0)", "hf-doc-builder"] @@ -2092 +2090 @@ flax = ["jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0 -flax-speech = ["librosa", "pyctcdecode (>=0.3.0)", 
"phonemizer"] +flax-speech = ["librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2108,2 +2106 @@ sklearn = ["scikit-learn"] -speech = ["torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] -testing = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "pytest-timeout", "black (==22.3)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.2)", "sacremoses", "rjieba", "safetensors (>=0.2.1)", "beautifulsoup4", "faiss-cpu", "cookiecutter (==1.7.3)"] +speech = ["torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2112 +2109 @@ tf-cpu = ["tensorflow-cpu (>=2.3)", "onnxconverter-common", "tf2onnx", "tensorfl -tf-speech = ["librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +tf-speech = ["librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2116 +2113 @@ torch = ["torch (>=1.7,!=1.12.0)"] -torch-speech = ["torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +torch-speech = ["torchaudio", "librosa", "pyctcdecode (>=0.4.0)", "phonemizer", "kenlm"] @@ -2164 +2161 @@ name = "types-requests" -version = "2.28.11.2" +version = "2.28.11.5" @@ -2175 +2172 @@ name = "types-urllib3" -version = "1.26.25.1" +version = "1.26.25.4" @@ -2262 +2259 @@ name = "zipp" -version = "3.9.0" +version = "3.10.0" @@ -2274 +2271 @@ name = "zstandard" -version = "0.18.0" +version = "0.19.0" @@ -2289 +2286 @@ python-versions = "3.9.6" -content-hash = "b47daad64be3fa9cc6433c97646a6df2193d0746216404ca26209b547f45ddcd" +content-hash = "c728e53c216c19c8a2e8e63b01dc1675e15e040e3b0166c7a9716bba283bf829" @@ -2294,4 +2291 @@ aiohttp = [] -aiosignal = [ - {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, - {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, -] +aiosignal = [] @@ -2436,4 +2430 @@ cloudpickle = [] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] +colorama = [] @@ -2619 +2610 @@ libcache = [ - {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, + {file = "libcache-0.4.0-py3-none-any.whl", hash = "sha256:fd89c5935b219a67783283f35611f61d983b6df8dc79d687c470b3fb9754741d"}, @@ -2796,4 +2787 @@ pillow = [] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] +platformdirs = [] @@ -2903,112 +2891,2 @@ pyflakes = [ -pyicu = [ - {file = "PyICU-2.9.tar.gz", hash = "sha256:3c29d6ce65546157117a1a347a303ecdfcf1a7591ed679fc88cdef4108845878"}, -] -pymongo = [ - {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:8c7ad5cab282f53b9d78d51504330d1c88c83fbe187e472c07e6908a0293142e"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a766157b195a897c64945d4ff87b050bb0e763bb78f3964e996378621c703b00"}, - {file = "pymongo-3.12.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8d6bf6fcd42cde2f02efb8126812a010c297eacefcd090a609639d2aeda6185"}, - {file = "pymongo-3.12.3-cp27-cp27m-win32.whl", hash = "sha256:5fdffb0cfeb4dc8646a5381d32ec981ae8472f29c695bf09e8f7a8edb2db12ca"}, - {file = "pymongo-3.12.3-cp27-cp27m-win_amd64.whl", hash = "sha256:648fcfd8e019b122b7be0e26830a3a2224d57c3e934f19c1e53a77b8380e6675"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3f0ac6e0203bd88863649e6ed9c7cfe53afab304bc8225f2597c4c0a74e4d1f0"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:71c0db2c313ea8a80825fb61b7826b8015874aec29ee6364ade5cb774fe4511b"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b779e87300635b8075e8d5cfd4fdf7f46078cd7610c381d956bca5556bb8f97"}, - {file = "pymongo-3.12.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:351a2efe1c9566c348ad0076f4bf541f4905a0ebe2d271f112f60852575f3c16"}, - {file = "pymongo-3.12.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a02313e71b7c370c43056f6b16c45effbb2d29a44d24403a3d5ba6ed322fa3f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:d3082e5c4d7b388792124f5e805b469109e58f1ab1eb1fbd8b998e8ab766ffb7"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:514e78d20d8382d5b97f32b20c83d1d0452c302c9a135f0a9022236eb9940fda"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:b1b5be40ebf52c3c67ee547e2c4435ed5bc6352f38d23e394520b686641a6be4"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:58db209da08a502ce6948841d522dcec80921d714024354153d00b054571993c"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:5296e5e69243ffd76bd919854c4da6630ae52e46175c804bc4c0e050d937b705"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:51d1d061df3995c2332ae78f036492cc188cb3da8ef122caeab3631a67bb477e"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b974b7f49d65a16ca1435bc1c25a681bb7d630509dd23b2e819ed36da0b7f"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e099b79ccf7c40f18b149a64d3d10639980035f9ceb223169dd806ff1bb0d9cc"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27e5ea64332385385b75414888ce9d1a9806be8616d7cef4ef409f4f256c6d06"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed7d11330e443aeecab23866055e08a5a536c95d2c25333aeb441af2dbac38d2"}, - {file = "pymongo-3.12.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93111fd4e08fa889c126aa8baf5c009a941880a539c87672e04583286517450a"}, - {file = "pymongo-3.12.3-cp310-cp310-win32.whl", hash = "sha256:2301051701b27aff2cbdf83fae22b7ca883c9563dfd088033267291b46196643"}, - {file = "pymongo-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:c7e8221278e5f9e2b6d3893cfc3a3e46c017161a57bb0e6f244826e4cee97916"}, - {file = "pymongo-3.12.3-cp34-cp34m-macosx_10_6_intel.whl", hash = 
"sha256:7b4a9fcd95e978cd3c96cdc2096aa54705266551422cf0883c12a4044def31c6"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:06b64cdf5121f86b78a84e61b8f899b6988732a8d304b503ea1f94a676221c06"}, - {file = "pymongo-3.12.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:c8f7dd025cb0bf19e2f60a64dfc24b513c8330e0cfe4a34ccf941eafd6194d9e"}, - {file = "pymongo-3.12.3-cp34-cp34m-win32.whl", hash = "sha256:ab23b0545ec71ea346bf50a5d376d674f56205b729980eaa62cdb7871805014b"}, - {file = "pymongo-3.12.3-cp34-cp34m-win_amd64.whl", hash = "sha256:1b5cb75d2642ff7db823f509641f143f752c0d1ab03166cafea1e42e50469834"}, - {file = "pymongo-3.12.3-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:fc2048d13ff427605fea328cbe5369dce549b8c7657b0e22051a5b8831170af6"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c5f83bb59d0ff60c6fdb1f8a7b0288fbc4640b1f0fd56f5ae2387749c35d34e3"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6632b1c63d58cddc72f43ab9f17267354ddce563dd5e11eadabd222dcc808808"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fedad05147b40ff8a93fcd016c421e6c159f149a2a481cfa0b94bfa3e473bab"}, - {file = "pymongo-3.12.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:208a61db8b8b647fb5b1ff3b52b4ed6dbced01eac3b61009958adb203596ee99"}, - {file = "pymongo-3.12.3-cp35-cp35m-win32.whl", hash = "sha256:3100a2352bdded6232b385ceda0c0a4624598c517d52c2d8cf014b7abbebd84d"}, - {file = "pymongo-3.12.3-cp35-cp35m-win_amd64.whl", hash = "sha256:3492ae1f97209c66af70e863e6420e6301cecb0a51a5efa701058aa73a8ca29e"}, - {file = "pymongo-3.12.3-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:87e18f29bac4a6be76a30e74de9c9005475e27100acf0830679420ce1fd9a6fd"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b3e08aef4ea05afbc0a70cd23c13684e7f5e074f02450964ec5cfa1c759d33d2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e66b3c9f8b89d4fd58a59c04fdbf10602a17c914fbaaa5e6ea593f1d54b06362"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5d67dbc8da2dac1644d71c1839d12d12aa333e266a9964d5b1a49feed036bc94"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:a351986d6c9006308f163c359ced40f80b6cffb42069f3e569b979829951038d"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5296669bff390135528001b4e48d33a7acaffcd361d98659628ece7f282f11aa"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:9d5b66d457d2c5739c184a777455c8fde7ab3600a56d8bbebecf64f7c55169e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:1c771f1a8b3cd2d697baaf57e9cfa4ae42371cacfbea42ea01d9577c06d92f96"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81a3ebc33b1367f301d1c8eda57eec4868e951504986d5d3fe437479dcdac5b2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cf113a46d81cff0559d57aa66ffa473d57d1a9496f97426318b6b5b14fdec1c"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64b9122be1c404ce4eb367ad609b590394587a676d84bfed8e03c3ce76d70560"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c6c71e198b36f0f0dfe354f06d3655ecfa30d69493a1da125a9a54668aad652"}, - {file = 
"pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33ab8c031f788609924e329003088831045f683931932a52a361d4a955b7dce2"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2b4c95c47fb81b19ea77dc1c50d23af3eba87c9628fcc2e03d44124a3d336ea"}, - {file = "pymongo-3.12.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4e0a3ea7fd01cf0a36509f320226bd8491e0f448f00b8cb89f601c109f6874e1"}, - {file = "pymongo-3.12.3-cp36-cp36m-win32.whl", hash = "sha256:dfec57f15f53d677b8e4535695ff3f37df7f8fe431f2efa8c3c8c4025b53d1eb"}, - {file = "pymongo-3.12.3-cp36-cp36m-win_amd64.whl", hash = "sha256:c22591cff80188dd8543be0b559d0c807f7288bd353dc0bcfe539b4588b3a5cd"}, - {file = "pymongo-3.12.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:7738147cd9dbd6d18d5593b3491b4620e13b61de975fd737283e4ad6c255c273"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:be1f10145f7ea76e3e836fdc5c8429c605675bdcddb0bca9725ee6e26874c00c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:295a5beaecb7bf054c1c6a28749ed72b19f4d4b61edcd8a0815d892424baf780"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:320f8734553c50cffe8a8e1ae36dfc7d7be1941c047489db20a814d2a170d7b5"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:5d20072d81cbfdd8e15e6a0c91fc7e3a4948c71e0adebfc67d3b4bcbe8602711"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:2c46a0afef69d61938a6fe32c3afd75b91dec3ab3056085dc72abbeedcc94166"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:5f530f35e1a57d4360eddcbed6945aecdaee2a491cd3f17025e7b5f2eea88ee7"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:6526933760ee1e6090db808f1690a111ec409699c1990efc96f134d26925c37f"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95d15cf81cd2fb926f2a6151a9f94c7aacc102b415e72bc0e040e29332b6731c"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d52a70350ec3dfc39b513df12b03b7f4c8f8ec6873bbf958299999db7b05eb1"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9252c991e8176b5a2fa574c5ab9a841679e315f6e576eb7cf0bd958f3e39b0ad"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:145d78c345a38011497e55aff22c0f8edd40ee676a6810f7e69563d68a125e83"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8e0a086dbbee406cc6f603931dfe54d1cb2fba585758e06a2de01037784b737"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6d5443104f89a840250087863c91484a72f254574848e951d1bdd7d8b2ce7c9"}, - {file = "pymongo-3.12.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6f93dbfa5a461107bc3f5026e0d5180499e13379e9404f07a9f79eb5e9e1303d"}, - {file = "pymongo-3.12.3-cp37-cp37m-win32.whl", hash = "sha256:c9d212e2af72d5c8d082775a43eb726520e95bf1c84826440f74225843975136"}, - {file = "pymongo-3.12.3-cp37-cp37m-win_amd64.whl", hash = "sha256:320a1fe403dd83a35709fcf01083d14bc1462e9789b711201349a9158db3a87e"}, - {file = "pymongo-3.12.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:a1ba93be779a9b8e5e44f5c133dc1db4313661cead8a2fd27661e6cb8d942ee9"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4294f2c1cd069b793e31c2e6d7ac44b121cf7cedccd03ebcc30f3fc3417b314a"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:845b178bd127bb074835d2eac635b980c58ec5e700ebadc8355062df708d5a71"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:176fdca18391e1206c32fb1d8265628a84d28333c20ad19468d91e3e98312cd1"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:28bfd5244d32faf3e49b5a8d1fab0631e922c26e8add089312e4be19fb05af50"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:f38b35ecd2628bf0267761ed659e48af7e620a7fcccfccf5774e7308fb18325c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:cebb3d8bcac4a6b48be65ebbc5c9881ed4a738e27bb96c86d9d7580a1fb09e05"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:80710d7591d579442c67a3bc7ae9dcba9ff95ea8414ac98001198d894fc4ff46"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89d7baa847383b9814de640c6f1a8553d125ec65e2761ad146ea2e75a7ad197c"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:602284e652bb56ca8760f8e88a5280636c5b63d7946fca1c2fe0f83c37dffc64"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2d763d05ec7211313a06e8571236017d3e61d5fef97fcf34ec4b36c0b6556"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6e4dccae8ef5dd76052647d78f02d5d0ffaff1856277d951666c54aeba3ad2"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1fc4d3985868860b6585376e511bb32403c5ffb58b0ed913496c27fd791deea"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4e5d163e6644c2bc84dd9f67bfa89288c23af26983d08fefcc2cbc22f6e57e6"}, - {file = "pymongo-3.12.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8d92c6bb9174d47c2257528f64645a00bbc6324a9ff45a626192797aff01dc14"}, - {file = "pymongo-3.12.3-cp38-cp38-win32.whl", hash = "sha256:b0db9a4691074c347f5d7ee830ab3529bc5ad860939de21c1f9c403daf1eda9a"}, - {file = "pymongo-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:d81047341ab56061aa4b6823c54d4632579c3b16e675089e8f520e9b918a133b"}, - {file = "pymongo-3.12.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07398d8a03545b98282f459f2603a6bb271f4448d484ed7f411121a519a7ea48"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b7df0d99e189b7027d417d4bfd9b8c53c9c7ed5a0a1495d26a6f547d820eca88"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a283425e6a474facd73072d8968812d1d9058490a5781e022ccf8895500b83ce"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2577b8161eeae4dd376d13100b2137d883c10bb457dd08935f60c9f9d4b5c5f6"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:517b09b1dd842390a965a896d1327c55dfe78199c9f5840595d40facbcd81854"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:2567885ff0c8c7c0887ba6cefe4ae4af96364a66a7069f924ce0cd12eb971d04"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_s390x.whl", hash = 
"sha256:71c5c200fd37a5322706080b09c3ec8907cf01c377a7187f354fc9e9e13abc73"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:14dee106a10b77224bba5efeeb6aee025aabe88eb87a2b850c46d3ee55bdab4a"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f340a2a908644ea6cccd399be0fb308c66e05d2800107345f9f0f0d59e1731c4"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b4c535f524c9d8c86c3afd71d199025daa070859a2bdaf94a298120b0de16db"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8455176fd1b86de97d859fed4ae0ef867bf998581f584c7a1a591246dfec330f"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf254a1a95e95fdf4eaa25faa1ea450a6533ed7a997f9f8e49ab971b61ea514d"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a3540e21213cb8ce232e68a7d0ee49cdd35194856c50b8bd87eeb572fadd42"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0e7a5d0b9077e8c3e57727f797ee8adf12e1d5e7534642230d98980d160d1320"}, - {file = "pymongo-3.12.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0be605bfb8461384a4cb81e80f51eb5ca1b89851f2d0e69a75458c788a7263a4"}, - {file = "pymongo-3.12.3-cp39-cp39-win32.whl", hash = "sha256:2157d68f85c28688e8b723bbe70c8013e0aba5570e08c48b3562f74d33fc05c4"}, - {file = "pymongo-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfa217bf8cf3ff6b30c8e6a89014e0c0e7b50941af787b970060ae5ba04a4ce5"}, - {file = "pymongo-3.12.3-py2.7-macosx-10.14-intel.egg", hash = "sha256:d81299f63dc33cc172c26faf59cc54dd795fc6dd5821a7676cca112a5ee8bbd6"}, - {file = "pymongo-3.12.3.tar.gz", hash = "sha256:0a89cadc0062a5e53664dde043f6c097172b8c1c5f0094490095282ff9995a5f"}, -] +pyicu = [] +pymongo = [] @@ -3100,3 +2978 @@ six = [ -sklearn = [ - {file = "sklearn-0.0.tar.gz", hash = "sha256:e23001573aa194b834122d2b9562459bf5ae494a2d59ca6b8aa22c85a44c0e31"}, -] +sklearn = [] diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index ce6f7ffe..1621d42a 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -20 +20 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.4.0-py3-none-any.whl", develop = false } diff --git a/workers/splits/src/splits/utils.py b/workers/splits/src/splits/utils.py index 85761750..3092856b 100644 --- a/workers/splits/src/splits/utils.py +++ b/workers/splits/src/splits/utils.py @@ -72,0 +73,5 @@ class Queues: + + +class CacheKind(Enum): + SPLITS = "/splits" + FIRST_ROWS = "/first-rows" diff --git a/workers/splits/src/splits/worker.py b/workers/splits/src/splits/worker.py index 4674f4b5..faeaa9c9 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/splits/src/splits/worker.py @@ -10,4 +10,4 @@ from libcache.simple_cache import ( - delete_first_rows_responses, - get_dataset_first_rows_response_splits, - get_splits_response, - upsert_splits_response, + delete_response, + get_dataset_response_ids, + get_response_without_content, + upsert_response, @@ -19,0 +20 @@ from splits.utils import ( + CacheKind, @@ -63 +64 @@ class SplitsWorker(Worker): - cache_entry = get_splits_response(dataset) + cached_response = 
get_response_without_content(kind=CacheKind.SPLITS.value, dataset=dataset) @@ -68,5 +69,6 @@ class SplitsWorker(Worker): - cache_entry["http_status"] == HTTPStatus.OK - and cache_entry["worker_version"] is not None - and self.compare_major_version(cache_entry["worker_version"]) == 0 - and cache_entry["dataset_git_revision"] is not None - and cache_entry["dataset_git_revision"] == dataset_git_revision + # TODO: use "error_code" to decide if the job should be skipped (ex: retry if temporary error) + cached_response["http_status"] == HTTPStatus.OK + and cached_response["worker_version"] is not None + and self.compare_major_version(cached_response["worker_version"]) == 0 + and cached_response["dataset_git_revision"] is not None + and cached_response["dataset_git_revision"] == dataset_git_revision @@ -88,4 +90,5 @@ class SplitsWorker(Worker): - response = splits_response_result["splits_response"] - upsert_splits_response( - dataset_name=dataset, - response=dict(response), + content = splits_response_result["splits_response"] + upsert_response( + kind=CacheKind.SPLITS.value, + dataset=dataset, + content=dict(content), @@ -98,5 +101,10 @@ class SplitsWorker(Worker): - splits_in_cache = get_dataset_first_rows_response_splits(dataset_name=dataset) - new_splits = [(s["dataset"], s["config"], s["split"]) for s in response["splits"]] - splits_to_delete = [s for s in splits_in_cache if s not in new_splits] - for d, c, s in splits_to_delete: - delete_first_rows_responses(dataset_name=d, config_name=c, split_name=s) + new_splits = [(s["dataset"], s["config"], s["split"]) for s in content["splits"]] + # remove obsolete first-rows responses from the cache + first_rows_responses_in_cache = [ + (s["dataset"], s["config"], s["split"]) + for s in get_dataset_response_ids(dataset=dataset) + if s["kind"] == CacheKind.FIRST_ROWS.value + ] + first_rows_responses_to_delete = [s for s in first_rows_responses_in_cache if s not in new_splits] + for d, c, s in first_rows_responses_to_delete: + delete_response(kind=CacheKind.FIRST_ROWS.value, dataset=d, config=c, split=s) @@ -104,2 +112,2 @@ class SplitsWorker(Worker): - f"{len(splits_to_delete)} 'first-rows' responses deleted from the cache for obsolete splits of" - f" dataset={dataset}" + f"{len(first_rows_responses_to_delete)} 'first-rows' responses deleted from the cache for obsolete" + f" splits of dataset={dataset}" @@ -106,0 +115 @@ class SplitsWorker(Worker): + # compute the 'first-rows' responses for the new splits @@ -116,3 +125,4 @@ class SplitsWorker(Worker): - upsert_splits_response( - dataset_name=dataset, - response=dict(err.as_response()), + upsert_response( + kind=CacheKind.SPLITS.value, + dataset=dataset, + content=dict(err.as_response()), @@ -127,3 +137,4 @@ class SplitsWorker(Worker): - upsert_splits_response( - dataset_name=dataset, - response=dict(e.as_response()), + upsert_response( + kind=CacheKind.SPLITS.value, + dataset=dataset, + content=dict(e.as_response()), diff --git a/workers/splits/tests/test_worker.py b/workers/splits/tests/test_worker.py index f8e35865..e1e47474 100644 --- a/workers/splits/tests/test_worker.py +++ b/workers/splits/tests/test_worker.py @@ -7,3 +7 @@ import pytest -from libcache.simple_cache import DoesNotExist -from libcache.simple_cache import _clean_database as _clean_cache_database -from libcache.simple_cache import get_splits_response +from libcache.simple_cache import DoesNotExist, _clean_cache_database, get_response @@ -12,0 +11 @@ from splits.config import WorkerConfig +from splits.utils import CacheKind @@ 
-45,10 +44,10 @@ def test_compute(worker: SplitsWorker, hub_public_csv: str) -> None: - cache_entry = get_splits_response(dataset_name=hub_public_csv) - assert cache_entry["http_status"] == HTTPStatus.OK - assert cache_entry["error_code"] is None - assert cache_entry["worker_version"] == worker.version - assert cache_entry["dataset_git_revision"] is not None - assert cache_entry["error_code"] is None - response = cache_entry["response"] - assert len(response["splits"]) == 1 - assert response["splits"][0]["num_bytes"] is None - assert response["splits"][0]["num_examples"] is None + cached_response = get_response(kind=CacheKind.SPLITS.value, dataset=hub_public_csv) + assert cached_response["http_status"] == HTTPStatus.OK + assert cached_response["error_code"] is None + assert cached_response["worker_version"] == worker.version + assert cached_response["dataset_git_revision"] is not None + assert cached_response["error_code"] is None + content = cached_response["content"] + assert len(content["splits"]) == 1 + assert content["splits"][0]["num_bytes"] is None + assert content["splits"][0]["num_examples"] is None @@ -61 +60 @@ def test_doesnotexist(worker: SplitsWorker) -> None: - get_splits_response(dataset_name=dataset) + get_response(kind=CacheKind.SPLITS.value, dataset=dataset)
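The splits-worker changes above replace the endpoint-specific cache helpers (get_splits_response, upsert_splits_response, delete_first_rows_responses) with a single generic API keyed by a cache "kind". A minimal sketch of the new call pattern, assuming the keyword arguments not visible in the diff (http_status, error_code, worker_version, dataset_git_revision) keep the semantics of the old helpers:

from http import HTTPStatus

from libcache.simple_cache import (
    delete_response,
    get_response_without_content,
    upsert_response,
)
from splits.utils import CacheKind

# write a /splits response under the generic kind-based key
upsert_response(
    kind=CacheKind.SPLITS.value,  # "/splits"
    dataset="some-dataset",       # placeholder name
    content={"splits": [{"dataset": "some-dataset", "config": "default", "split": "train"}]},
    http_status=HTTPStatus.OK,    # assumed to carry over from upsert_splits_response
)

# read back the metadata without the potentially large content payload
cached = get_response_without_content(kind=CacheKind.SPLITS.value, dataset="some-dataset")
assert cached["http_status"] == HTTPStatus.OK

# drop an obsolete /first-rows entry for a split that no longer exists
delete_response(kind=CacheKind.FIRST_ROWS.value, dataset="some-dataset", config="default", split="test")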
35a30dbcd687b26db1f02502ea8305f70c064473
Adrien
2022-11-17T16:53:20
Standardize Helm Charts (#635)
diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index db143712..b8a476f0 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -9,2 +9,10 @@ secrets: - hfToken: datasets-server-hf-token - mongoUrl: false + mongoUrl: + fromSecret: false + secretName: "mongo-url" + value: mongo:// + token: + fromSecret: true + secretName: "datasets-server-hf-token" + +persistence: + existingClaim: "nfs-datasets-server-pvc" @@ -12,4 +19,0 @@ secrets: -storage: - nfs: - path: "/fsx" - server: "svm-08a37cf73026f0b5c.fs-097afa9688029b62a.fsx.us-east-1.amazonaws.com" diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 8d8fc05e..3877f00b 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -48,2 +48,7 @@ secrets: - hfToken: hf-token - mongoUrl: mongo-url + mongoUrl: + fromSecret: true + secretName: "mongo-url" + value: mongo:// + token: + fromSecret: true + secretName: "hf-token" @@ -51,6 +56,4 @@ secrets: -storage: - nfs: - path: "/fsx" - server: "svm-0bd5fa426547fca22.fs-02050b8d555063cde.fsx.us-east-1.amazonaws.com" - # https://us-east-1.console.aws.amazon.com/fsx/home?region=us-east-1#file-system-details/fs-02050b8d555063cde - # Alarm: https://us-east-1.console.aws.amazon.com/cloudwatch/home?region=us-east-1#alarmsV2:alarm/Low+disk+on+datasets+server? +persistence: + # https://us-east-1.console.aws.amazon.com/fsx/home?region=us-east-1#file-system-details/fs-02050b8d555063cde + # Alarm: https://us-east-1.console.aws.amazon.com/cloudwatch/home?region=us-east-1#alarmsV2:alarm/Low+disk+on+datasets+server? + existingClaim: "nfs-datasets-server-pvc" diff --git a/chart/templates/_envCache.tpl b/chart/templates/_envCache.tpl new file mode 100644 index 00000000..d5b5ecf4 --- /dev/null +++ b/chart/templates/_envCache.tpl @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "envCache" -}} +- name: CACHE_ASSETS_DIRECTORY + value: {{ .Values.cache.assetsDirectory | quote }} +- name: CACHE_MONGO_DATABASE + value: {{ .Values.cache.mongoDatabase | quote }} +- name: CACHE_MONGO_URL + {{- if .Values.secrets.mongoUrl.fromSecret }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl.secretName | quote }} + key: MONGO_URL + optional: false + {{- else }} + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + value: {{ .Values.secrets.mongoUrl.value }} + {{- end }} + {{- end }} +{{- end -}} diff --git a/chart/templates/_envCommon.tpl b/chart/templates/_envCommon.tpl new file mode 100644 index 00000000..65c2e7f4 --- /dev/null +++ b/chart/templates/_envCommon.tpl @@ -0,0 +1,21 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "envCommon" -}} +- name: COMMON_ASSETS_BASE_URL + value: "{{ include "assets.baseUrl" . }}" +- name: COMMON_HF_ENDPOINT + value: {{ .Values.common.hfEndpoint | quote }} +- name: COMMON_HF_TOKEN + {{- if .Values.secrets.token.fromSecret }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.token.secretName | quote }} + key: HF_TOKEN + optional: false + {{- else }} + value: {{ .Values.secrets.token.value }} + {{- end }} +- name: COMMON_LOG_LEVEL + value: {{ .Values.common.logLevel | quote }} +{{- end -}} diff --git a/chart/templates/_envQueue.tpl b/chart/templates/_envQueue.tpl new file mode 100644 index 00000000..061480b7 --- /dev/null +++ b/chart/templates/_envQueue.tpl @@ -0,0 +1,21 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +{{- define "envQueue" -}} +- name: QUEUE_MONGO_DATABASE + value: {{ .Values.queue.mongoDatabase | quote }} +- name: QUEUE_MONGO_URL + {{- if .Values.secrets.mongoUrl.fromSecret }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl.secretName | quote }} + key: MONGO_URL + optional: false + {{- else }} + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + value: {{ .Values.secrets.mongoUrl.value }} + {{- end }} + {{- end }} +{{- end -}} diff --git a/chart/templates/_initContainerAssets.tpl b/chart/templates/_initContainerAssets.tpl index 64246828..e220d25a 100644 --- a/chart/templates/_initContainerAssets.tpl +++ b/chart/templates/_initContainerAssets.tpl @@ -14 +14 @@ - name: nfs + name: data diff --git a/chart/templates/_initContainerCache.tpl b/chart/templates/_initContainerCache.tpl index 4504665d..c56aa404 100644 --- a/chart/templates/_initContainerCache.tpl +++ b/chart/templates/_initContainerCache.tpl @@ -14 +14 @@ - name: nfs + name: data diff --git a/chart/templates/_initContainerNumbaCache.tpl b/chart/templates/_initContainerNumbaCache.tpl index 1f9bd87c..77e94fb7 100644 --- a/chart/templates/_initContainerNumbaCache.tpl +++ b/chart/templates/_initContainerNumbaCache.tpl @@ -14 +14 @@ - name: nfs + name: data diff --git a/chart/templates/_securityContext.tpl b/chart/templates/_securityContext.tpl new file mode 100644 index 00000000..23a00571 --- /dev/null +++ b/chart/templates/_securityContext.tpl @@ -0,0 +1,8 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "securityContext" -}} +runAsUser: {{ .Values.uid }} +runAsGroup: {{ .Values.gid }} +runAsNonRoot: true +{{- end -}} diff --git a/chart/templates/_volumeData.tpl b/chart/templates/_volumeData.tpl new file mode 100644 index 00000000..72448740 --- /dev/null +++ b/chart/templates/_volumeData.tpl @@ -0,0 +1,8 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "volumeData" -}} +- name: data + persistentVolumeClaim: + claimName: {{ .Values.persistence.existingClaim | default (include "release" .) }} +{{- end -}} diff --git a/chart/templates/_volumeMountAssets.tpl b/chart/templates/_volumeMountAssets.tpl new file mode 100644 index 00000000..1112b979 --- /dev/null +++ b/chart/templates/_volumeMountAssets.tpl @@ -0,0 +1,18 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "volumeMountAssetsRO" -}} +- mountPath: {{ .Values.cache.assetsDirectory | quote }} + mountPropagation: None + name: data + subPath: "{{ include "assets.subpath" . }}" + readOnly: true +{{- end -}} + +{{- define "volumeMountAssetsRW" -}} +- mountPath: {{ .Values.cache.assetsDirectory | quote }} + mountPropagation: None + name: data + subPath: "{{ include "assets.subpath" . }}" + readOnly: false +{{- end -}} diff --git a/chart/templates/_volumeMountDatasetsCache.tpl b/chart/templates/_volumeMountDatasetsCache.tpl new file mode 100644 index 00000000..0377fd2c --- /dev/null +++ b/chart/templates/_volumeMountDatasetsCache.tpl @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "volumeMountDatasetsCache" -}} +- mountPath: {{ .Values.hfDatasetsCache | quote }} + mountPropagation: None + name: data + subPath: "{{ include "cache.datasets.subpath" . 
}}" + readOnly: false +{{- end -}} diff --git a/chart/templates/_volumeMountNumbaCache.tpl b/chart/templates/_volumeMountNumbaCache.tpl new file mode 100644 index 00000000..2bc52f2a --- /dev/null +++ b/chart/templates/_volumeMountNumbaCache.tpl @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "volumeMountNumbaCache" -}} +- mountPath: {{ .Values.numbaCacheDirectory | quote }} + mountPropagation: None + name: data + subPath: "{{ include "cache.numba.subpath" . }}" + readOnly: false +{{- end -}} diff --git a/chart/templates/ingress.yaml b/chart/templates/ingress.yaml index 7f30b505..8b3e9f1c 100644 --- a/chart/templates/ingress.yaml +++ b/chart/templates/ingress.yaml @@ -4,5 +4,2 @@ metadata: - annotations: - # to communicate with AWS - {{ toYaml .Values.ingress.annotations | nindent 4 }} - labels: - {{ include "labels.reverseProxy" . | nindent 4 }} + annotations: {{ toYaml .Values.ingress.annotations | nindent 4 }} + labels: {{ include "labels.reverseProxy" . | nindent 4 }} @@ -11,0 +9,6 @@ spec: + {{- if .Values.ingress.tls.enabled }} + tls: + - hosts: + - {{ .Values.hostname }} + secretName: {{ .Values.ingress.tls.secretName }} + {{- end}} diff --git a/chart/templates/jobs/mongodb-migration/_container.tpl b/chart/templates/jobs/mongodb-migration/_container.tpl index 09b87d31..7b44e0e9 100644 --- a/chart/templates/jobs/mongodb-migration/_container.tpl +++ b/chart/templates/jobs/mongodb-migration/_container.tpl @@ -9,38 +9,3 @@ - - name: CACHE_ASSETS_DIRECTORY - value: {{ .Values.cache.assetsDirectory | quote }} - - name: CACHE_MONGO_DATABASE - value: {{ .Values.cache.mongoDatabase | quote }} - - name: CACHE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: QUEUE_MONGO_DATABASE - value: {{ .Values.queue.mongoDatabase | quote }} - - name: QUEUE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: COMMON_ASSETS_BASE_URL - value: "{{ include "assets.baseUrl" . }}" - - name: COMMON_HF_ENDPOINT - value: {{ .Values.common.hfEndpoint | quote }} - - name: COMMON_HF_TOKEN - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN - optional: false - - name: COMMON_LOG_LEVEL - value: {{ .Values.common.logLevel | quote }} + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} @@ -50,3 +15 @@ - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} + {{- if .Values.secrets.mongoUrl.fromSecret }} @@ -55 +18 @@ - name: {{ .Values.secrets.mongoUrl | quote }} + name: {{ .Values.secrets.mongoUrl.secretName | quote }} @@ -58 +21,7 @@ - {{- end }} + {{- else }} + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + value: {{ .Values.secrets.mongoUrl.value }} + {{- end }} + {{- end }} @@ -60,5 +29 @@ - - mountPath: {{ .Values.cache.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . }}" - readOnly: false + {{ include "volumeMountAssetsRO" . 
| nindent 2 }} @@ -67,2 +32 @@ - resources: - {{ toYaml .Values.mongodbMigration.resources | nindent 4 }} + resources: {{ toYaml .Values.mongodbMigration.resources | nindent 4 }} diff --git a/chart/templates/jobs/mongodb-migration/job.yaml b/chart/templates/jobs/mongodb-migration/job.yaml index 2ee5a5f3..c2f81e0f 100644 --- a/chart/templates/jobs/mongodb-migration/job.yaml +++ b/chart/templates/jobs/mongodb-migration/job.yaml @@ -7,2 +7 @@ metadata: - labels: - {{ include "labels.mongodbMigration" . | nindent 4 }} + labels: {{ include "labels.mongodbMigration" . | nindent 4 }} @@ -18,2 +17 @@ spec: - labels: - {{ include "labels.mongodbMigration" . | nindent 8 }} + labels: {{ include "labels.mongodbMigration" . | nindent 8 }} @@ -22,17 +20,7 @@ spec: - initContainers: - {{ include "initContainerAssets" . | nindent 8 }} - containers: - {{ include "containerMongodbMigration" . | nindent 8 }} - nodeSelector: - {{ toYaml .Values.mongodbMigration.nodeSelector | nindent 8 }} - tolerations: - {{ toYaml .Values.mongodbMigration.tolerations | nindent 8 }} - volumes: - - name: nfs - nfs: - server: {{ .Values.storage.nfs.server }} - path: {{ .Values.storage.nfs.path }} - securityContext: - runAsUser: {{ .Values.uid }} - runAsGroup: {{ .Values.gid }} - runAsNonRoot: true + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: {{ include "initContainerAssets" . | nindent 8 }} + containers: {{ include "containerMongodbMigration" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.mongodbMigration.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.mongodbMigration.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/templates/pvc.yaml b/chart/templates/pvc.yaml new file mode 100644 index 00000000..3b602098 --- /dev/null +++ b/chart/templates/pvc.yaml @@ -0,0 +1,15 @@ +{{- if (not .Values.persistence.existingClaim) }} +kind: PersistentVolumeClaim +apiVersion: v1 +metadata: + labels: {{ include "labels" . | nindent 4 }} + name: {{ include "release" . }} + namespace: {{ .Release.Namespace }} +spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: {{ .Values.persistence.size }} + storageClassName: {{ .Values.persistence.storageClass }} +{{- end }} diff --git a/chart/templates/reverse-proxy/_container.tpl b/chart/templates/reverse-proxy/_container.tpl index 24a372c0..f18e1231 100644 --- a/chart/templates/reverse-proxy/_container.tpl +++ b/chart/templates/reverse-proxy/_container.tpl @@ -19,0 +20 @@ + {{ include "volumeMountAssetsRO" . | nindent 2 }} @@ -32,5 +32,0 @@ - - mountPath: {{ .Values.cache.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . }}" - readOnly: true diff --git a/chart/templates/reverse-proxy/configMap.yaml b/chart/templates/reverse-proxy/configMap.yaml index 72021abf..3606cd50 100644 --- a/chart/templates/reverse-proxy/configMap.yaml +++ b/chart/templates/reverse-proxy/configMap.yaml @@ -7,2 +7 @@ metadata: - labels: - {{ include "labels.reverseProxy" . | nindent 4 }} + labels: {{ include "labels.reverseProxy" . | nindent 4 }} diff --git a/chart/templates/reverse-proxy/deployment.yaml b/chart/templates/reverse-proxy/deployment.yaml index 897d30fb..ea0edb41 100644 --- a/chart/templates/reverse-proxy/deployment.yaml +++ b/chart/templates/reverse-proxy/deployment.yaml @@ -7,2 +7 @@ metadata: - labels: - {{ include "labels.reverseProxy" . 
| nindent 4 }} + labels: {{ include "labels.reverseProxy" . | nindent 4 }} @@ -16,2 +15 @@ spec: - matchLabels: - {{ include "labels.reverseProxy" . | nindent 6 }} + matchLabels: {{ include "labels.reverseProxy" . | nindent 6 }} @@ -25,2 +23 @@ spec: - labels: - {{ include "labels.reverseProxy" . | nindent 8 }} + labels: {{ include "labels.reverseProxy" . | nindent 8 }} @@ -31,8 +28,5 @@ spec: - initContainers: - {{ include "initContainerAssets" . | nindent 8 }} - containers: - {{ include "containerReverseProxy" . | nindent 8 }} - nodeSelector: - {{ toYaml .Values.reverseProxy.nodeSelector | nindent 8 }} - tolerations: - {{ toYaml .Values.reverseProxy.tolerations | nindent 8 }} + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: {{ include "initContainerAssets" . | nindent 8 }} + containers: {{ include "containerReverseProxy" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.reverseProxy.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.reverseProxy.tolerations | nindent 8 }} @@ -40,4 +34 @@ spec: - - name: nfs - nfs: - server: {{ .Values.storage.nfs.server }} - path: {{ .Values.storage.nfs.path }} + {{ include "volumeData" . | nindent 6 }} diff --git a/chart/templates/reverse-proxy/service.yaml b/chart/templates/reverse-proxy/service.yaml index 5b99652a..124d9d9d 100644 --- a/chart/templates/reverse-proxy/service.yaml +++ b/chart/templates/reverse-proxy/service.yaml @@ -8,2 +8 @@ metadata: - annotations: - {{ toYaml .Values.reverseProxy.service.annotations | nindent 4 }} + annotations: {{ toYaml .Values.reverseProxy.service.annotations | nindent 4 }} @@ -11,2 +10 @@ metadata: - labels: - {{ include "labels.reverseProxy" . | nindent 4 }} + labels: {{ include "labels.reverseProxy" . | nindent 4 }} @@ -19,2 +17 @@ spec: - selector: - {{ include "labels.reverseProxy" . | nindent 4 }} + selector: {{ include "labels.reverseProxy" . | nindent 4 }} diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl index 5b58d33a..cd23cbf0 100644 --- a/chart/templates/services/admin/_container.tpl +++ b/chart/templates/services/admin/_container.tpl @@ -9,38 +9,3 @@ - - name: CACHE_ASSETS_DIRECTORY - value: {{ .Values.cache.assetsDirectory | quote }} - - name: CACHE_MONGO_DATABASE - value: {{ .Values.cache.mongoDatabase | quote }} - - name: CACHE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: QUEUE_MONGO_DATABASE - value: {{ .Values.queue.mongoDatabase | quote }} - - name: QUEUE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: COMMON_ASSETS_BASE_URL - value: "{{ include "assets.baseUrl" . }}" - - name: COMMON_HF_ENDPOINT - value: {{ .Values.common.hfEndpoint | quote }} - - name: COMMON_HF_TOKEN - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN - optional: false - - name: COMMON_LOG_LEVEL - value: {{ .Values.common.logLevel | quote }} + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . 
| nindent 2 }} @@ -63,6 +28 @@ - volumeMounts: - - mountPath: {{ .Values.cache.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . }}" - readOnly: false + volumeMounts: {{ include "volumeMountAssetsRO" . | nindent 2 }} diff --git a/chart/templates/services/admin/deployment.yaml b/chart/templates/services/admin/deployment.yaml index a5061067..5e8e6553 100644 --- a/chart/templates/services/admin/deployment.yaml +++ b/chart/templates/services/admin/deployment.yaml @@ -7,2 +7 @@ metadata: - labels: - {{ include "labels.admin" . | nindent 4 }} + labels: {{ include "labels.admin" . | nindent 4 }} @@ -16,2 +15 @@ spec: - matchLabels: - {{ include "labels.admin" . | nindent 6 }} + matchLabels: {{ include "labels.admin" . | nindent 6 }} @@ -25,2 +23 @@ spec: - labels: - {{ include "labels.admin" . | nindent 8 }} + labels: {{ include "labels.admin" . | nindent 8 }} @@ -28,17 +25,7 @@ spec: - initContainers: - {{ include "initContainerAssets" . | nindent 8 }} - containers: - {{ include "containerAdmin" . | nindent 8 }} - nodeSelector: - {{ toYaml .Values.admin.nodeSelector | nindent 8 }} - tolerations: - {{ toYaml .Values.admin.tolerations | nindent 8 }} - volumes: - - name: nfs - nfs: - server: {{ .Values.storage.nfs.server }} - path: {{ .Values.storage.nfs.path }} - securityContext: - runAsUser: {{ .Values.uid }} - runAsGroup: {{ .Values.gid }} - runAsNonRoot: true + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: {{ include "initContainerAssets" . | nindent 8 }} + containers: {{ include "containerAdmin" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.admin.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.admin.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/templates/services/admin/service.yaml b/chart/templates/services/admin/service.yaml index 57679886..e4855e62 100644 --- a/chart/templates/services/admin/service.yaml +++ b/chart/templates/services/admin/service.yaml @@ -8,2 +8 @@ metadata: - annotations: - {{ toYaml .Values.admin.service.annotations | nindent 4 }} + annotations: {{ toYaml .Values.admin.service.annotations | nindent 4 }} @@ -11,2 +10 @@ metadata: - labels: - {{ include "labels.admin" . | nindent 4 }} + labels: {{ include "labels.admin" . | nindent 4 }} @@ -19,2 +17 @@ spec: - selector: - {{ include "labels.admin" . | nindent 4 }} + selector: {{ include "labels.admin" . | nindent 4 }} diff --git a/chart/templates/services/admin/servicemonitor.yaml b/chart/templates/services/admin/servicemonitor.yaml index 8500e0fb..de2f0d92 100644 --- a/chart/templates/services/admin/servicemonitor.yaml +++ b/chart/templates/services/admin/servicemonitor.yaml @@ -8,2 +8 @@ metadata: - labels: - {{ include "labels.admin" . | nindent 4 }} + labels: {{ include "labels.admin" . | nindent 4 }} @@ -20,2 +19 @@ spec: - matchLabels: - {{ include "labels.admin" . | nindent 6 }} + matchLabels: {{ include "labels.admin" . 
| nindent 6 }} diff --git a/chart/templates/services/api/_container.tpl b/chart/templates/services/api/_container.tpl index 56eb38bf..2293aad3 100644 --- a/chart/templates/services/api/_container.tpl +++ b/chart/templates/services/api/_container.tpl @@ -9,38 +9,3 @@ - - name: CACHE_ASSETS_DIRECTORY - value: {{ .Values.cache.assetsDirectory | quote }} - - name: CACHE_MONGO_DATABASE - value: {{ .Values.cache.mongoDatabase | quote }} - - name: CACHE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: QUEUE_MONGO_DATABASE - value: {{ .Values.queue.mongoDatabase | quote }} - - name: QUEUE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: COMMON_ASSETS_BASE_URL - value: "{{ include "assets.baseUrl" . }}" - - name: COMMON_HF_ENDPOINT - value: {{ .Values.common.hfEndpoint | quote }} - - name: COMMON_HF_TOKEN - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN - optional: false - - name: COMMON_LOG_LEVEL - value: {{ .Values.common.logLevel | quote }} + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} @@ -61,6 +26 @@ - volumeMounts: - - mountPath: {{ .Values.cache.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . }}" - readOnly: true + volumeMounts: {{ include "volumeMountAssetsRO" . | nindent 2 }} diff --git a/chart/templates/services/api/deployment.yaml b/chart/templates/services/api/deployment.yaml index 0046d64d..0fb76b4d 100644 --- a/chart/templates/services/api/deployment.yaml +++ b/chart/templates/services/api/deployment.yaml @@ -7,2 +7 @@ metadata: - labels: - {{ include "labels.api" . | nindent 4 }} + labels: {{ include "labels.api" . | nindent 4 }} @@ -16,2 +15 @@ spec: - matchLabels: - {{ include "labels.api" . | nindent 6 }} + matchLabels: {{ include "labels.api" . | nindent 6 }} @@ -25,2 +23 @@ spec: - labels: - {{ include "labels.api" . | nindent 8 }} + labels: {{ include "labels.api" . | nindent 8 }} @@ -28,17 +25,7 @@ spec: - initContainers: - {{ include "initContainerAssets" . | nindent 8 }} - containers: - {{ include "containerApi" . | nindent 8 }} - nodeSelector: - {{ toYaml .Values.api.nodeSelector | nindent 8 }} - tolerations: - {{ toYaml .Values.api.tolerations | nindent 8 }} - volumes: - - name: nfs - nfs: - server: {{ .Values.storage.nfs.server }} - path: {{ .Values.storage.nfs.path }} - securityContext: - runAsUser: {{ .Values.uid }} - runAsGroup: {{ .Values.gid }} - runAsNonRoot: true + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} + initContainers: {{ include "initContainerAssets" . | nindent 8 }} + containers: {{ include "containerApi" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.api.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.api.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . 
| nindent 8 }} diff --git a/chart/templates/services/api/service.yaml b/chart/templates/services/api/service.yaml index 73a6c302..f270ce4b 100644 --- a/chart/templates/services/api/service.yaml +++ b/chart/templates/services/api/service.yaml @@ -8,2 +8 @@ metadata: - annotations: - {{ toYaml .Values.api.service.annotations | nindent 4 }} + annotations: {{ toYaml .Values.api.service.annotations | nindent 4 }} @@ -11,2 +10 @@ metadata: - labels: - {{ include "labels.api" . | nindent 4 }} + labels: {{ include "labels.api" . | nindent 4 }} @@ -19,2 +17 @@ spec: - selector: - {{ include "labels.api" . | nindent 4 }} + selector: {{ include "labels.api" . | nindent 4 }} diff --git a/chart/templates/services/api/servicemonitor.yaml b/chart/templates/services/api/servicemonitor.yaml index 2ceffe2a..3bfe8cea 100644 --- a/chart/templates/services/api/servicemonitor.yaml +++ b/chart/templates/services/api/servicemonitor.yaml @@ -8,2 +8 @@ metadata: - labels: - {{ include "labels.api" . | nindent 4 }} + labels: {{ include "labels.api" . | nindent 4 }} @@ -20,2 +19 @@ spec: - matchLabels: - {{ include "labels.api" . | nindent 6 }} + matchLabels: {{ include "labels.api" . | nindent 6 }} diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index d36bb62d..e5068ec5 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -9,14 +9,3 @@ - - name: CACHE_ASSETS_DIRECTORY - value: {{ .Values.cache.assetsDirectory | quote }} - - name: CACHE_MONGO_DATABASE - value: {{ .Values.cache.mongoDatabase | quote }} - - name: CACHE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} @@ -31,12 +19,0 @@ - - name: QUEUE_MONGO_DATABASE - value: {{ .Values.queue.mongoDatabase | quote }} - - name: QUEUE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} @@ -45,12 +21,0 @@ - - name: COMMON_ASSETS_BASE_URL - value: "{{ include "assets.baseUrl" . }}" - - name: COMMON_HF_ENDPOINT - value: {{ .Values.common.hfEndpoint | quote }} - - name: COMMON_HF_TOKEN - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN - optional: false - - name: COMMON_LOG_LEVEL - value: {{ .Values.common.logLevel | quote }} @@ -76,15 +41,3 @@ - - mountPath: {{ .Values.cache.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . }}" - readOnly: false - - mountPath: {{ .Values.hfDatasetsCache | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "cache.datasets.subpath" . }}" - readOnly: false - - mountPath: {{ .Values.numbaCacheDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "cache.numba.subpath" . }}" - readOnly: false + {{ include "volumeMountAssetsRW" . | nindent 2 }} + {{ include "volumeMountDatasetsCache" . | nindent 2 }} + {{ include "volumeMountNumbaCache" . 
| nindent 2 }} @@ -93,2 +46 @@ - resources: - {{ toYaml .Values.firstRows.resources | nindent 4 }} + resources: {{ toYaml .Values.firstRows.resources | nindent 4 }} diff --git a/chart/templates/worker/first-rows/deployment.yaml b/chart/templates/worker/first-rows/deployment.yaml index 49a14bff..63930848 100644 --- a/chart/templates/worker/first-rows/deployment.yaml +++ b/chart/templates/worker/first-rows/deployment.yaml @@ -7,2 +7 @@ metadata: - labels: - {{ include "labels.firstRows" . | nindent 4 }} + labels: {{ include "labels.firstRows" . | nindent 4 }} @@ -16,2 +15 @@ spec: - matchLabels: - {{ include "labels.firstRows" . | nindent 6 }} + matchLabels: {{ include "labels.firstRows" . | nindent 6 }} @@ -22,2 +20 @@ spec: - labels: - {{ include "labels.firstRows" . | nindent 8 }} + labels: {{ include "labels.firstRows" . | nindent 8 }} @@ -24,0 +22 @@ spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} @@ -29,15 +27,5 @@ spec: - containers: - {{ include "containerWorkerFirstRows" . | nindent 8 }} - nodeSelector: - {{ toYaml .Values.firstRows.nodeSelector | nindent 8 }} - tolerations: - {{ toYaml .Values.firstRows.tolerations | nindent 8 }} - volumes: - - name: nfs - nfs: - server: {{ .Values.storage.nfs.server }} - path: {{ .Values.storage.nfs.path }} - securityContext: - runAsUser: {{ .Values.uid }} - runAsGroup: {{ .Values.gid }} - runAsNonRoot: true + containers: {{ include "containerWorkerFirstRows" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.firstRows.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.firstRows.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index fe473b83..e7a0a306 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -9,14 +9,3 @@ - - name: CACHE_ASSETS_DIRECTORY - value: {{ .Values.cache.assetsDirectory | quote }} - - name: CACHE_MONGO_DATABASE - value: {{ .Values.cache.mongoDatabase | quote }} - - name: CACHE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} + {{ include "envCache" . | nindent 2 }} + {{ include "envQueue" . | nindent 2 }} + {{ include "envCommon" . | nindent 2 }} @@ -31,12 +19,0 @@ - - name: QUEUE_MONGO_DATABASE - value: {{ .Values.queue.mongoDatabase | quote }} - - name: QUEUE_MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} @@ -45,12 +21,0 @@ - - name: COMMON_ASSETS_BASE_URL - value: "{{ include "assets.baseUrl" . }}" - - name: COMMON_HF_ENDPOINT - value: {{ .Values.common.hfEndpoint | quote }} - - name: COMMON_HF_TOKEN - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN - optional: false - - name: COMMON_LOG_LEVEL - value: {{ .Values.common.logLevel | quote }} @@ -66,15 +31,3 @@ - - mountPath: {{ .Values.cache.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . 
}}" - readOnly: false - - mountPath: {{ .Values.hfDatasetsCache | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "cache.datasets.subpath" . }}" - readOnly: false - - mountPath: {{ .Values.numbaCacheDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "cache.numba.subpath" . }}" - readOnly: false + {{ include "volumeMountAssetsRO" . | nindent 2 }} + {{ include "volumeMountDatasetsCache" . | nindent 2 }} + {{ include "volumeMountNumbaCache" . | nindent 2 }} @@ -83,2 +36 @@ - resources: - {{ toYaml .Values.splits.resources | nindent 4 }} + resources: {{ toYaml .Values.splits.resources | nindent 4 }} diff --git a/chart/templates/worker/splits/deployment.yaml b/chart/templates/worker/splits/deployment.yaml index 1fd69c87..fb1cd610 100644 --- a/chart/templates/worker/splits/deployment.yaml +++ b/chart/templates/worker/splits/deployment.yaml @@ -7,2 +7 @@ metadata: - labels: - {{ include "labels.splits" . | nindent 4 }} + labels: {{ include "labels.splits" . | nindent 4 }} @@ -16,2 +15 @@ spec: - matchLabels: - {{ include "labels.splits" . | nindent 6 }} + matchLabels: {{ include "labels.splits" . | nindent 6 }} @@ -22,2 +20 @@ spec: - labels: - {{ include "labels.splits" . | nindent 8 }} + labels: {{ include "labels.splits" . | nindent 8 }} @@ -24,0 +22 @@ spec: + imagePullSecrets: {{ toYaml .Values.imagePullSecrets | nindent 8 }} @@ -28,15 +26,5 @@ spec: - containers: - {{ include "containerWorkerSplits" . | nindent 8 }} - nodeSelector: - {{ toYaml .Values.splits.nodeSelector | nindent 8 }} - tolerations: - {{ toYaml .Values.splits.tolerations | nindent 8 }} - volumes: - - name: nfs - nfs: - server: {{ .Values.storage.nfs.server }} - path: {{ .Values.storage.nfs.path }} - securityContext: - runAsUser: {{ .Values.uid }} - runAsGroup: {{ .Values.gid }} - runAsNonRoot: true + containers: {{ include "containerWorkerSplits" . | nindent 8 }} + nodeSelector: {{ toYaml .Values.splits.nodeSelector | nindent 8 }} + tolerations: {{ toYaml .Values.splits.tolerations | nindent 8 }} + volumes: {{ include "volumeData" . | nindent 8 }} + securityContext: {{ include "securityContext" . | nindent 8 }} diff --git a/chart/values.yaml b/chart/values.yaml index 4d38807c..f517e80f 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -6,2 +6,8 @@ secrets: - hfToken: datasets-server-hf-token - mongoUrl: false + mongoUrl: + fromSecret: false + secretName: "mongo-url" + value: mongo:// + token: + fromSecret: false + secretName: "datasets-server-hf-token" + value: hf_app @@ -12,2 +18,4 @@ gid: 3000 -storage: - nfs: {} +persistence: + existingClaim: "" + storageClass: "" + size: 20Gi @@ -26 +34,3 @@ mongodb: -# overriden by docker-images.yaml (which must be in JSON format!) +imagePullSecrets: [] + +# overridden by docker-images.yaml (which must be in JSON format!) @@ -107,0 +118,3 @@ ingress: + tls: + enabled: false + secretName: ""
076a0dd2ab98d74e6e8990bba241c69bff3042a0
Sylvain Lesage
2022-11-16T16:15:03
feat: 🎸 upgrade huggingface_hub to 0.11.0 (#643)
diff --git a/e2e/poetry.lock b/e2e/poetry.lock index 3f42b0ad..1da2ce67 100644 --- a/e2e/poetry.lock +++ b/e2e/poetry.lock @@ -170 +170 @@ name = "huggingface-hub" -version = "0.8.1" +version = "0.11.0" @@ -184,0 +185,7 @@ typing-extensions = ">=3.7.4.3" +all = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["toml", "fastai (>=2.4)", "fastcore (>=1.3.27)"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["tensorflow", "pydot", "graphviz"] +testing = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile"] @@ -186,6 +193 @@ torch = ["torch"] -testing = ["soundfile", "datasets", "pytest-cov", "pytest"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -quality = ["flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (>=22.0,<23.0)"] -fastai = ["fastcore (>=1.3.27)", "fastai (>=2.4)", "toml"] -dev = ["flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (>=22.0,<23.0)", "soundfile", "datasets", "pytest-cov", "pytest"] -all = ["flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (>=22.0,<23.0)", "soundfile", "datasets", "pytest-cov", "pytest"] +typing = ["types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -580 +582 @@ python-versions = "3.9.6" -content-hash = "c70d795e1d1e5f16294299e275c95212b13269a7a985b515448b6872c70098c7" +content-hash = "3b374e0aa099d50d272c1c3f58b5a6f5e4a1cb8b2e13c313108423a8b2ff723c" diff --git a/e2e/pyproject.toml b/e2e/pyproject.toml index bbee6b98..01e1da88 100644 --- a/e2e/pyproject.toml +++ b/e2e/pyproject.toml @@ -19 +19 @@ flake8 = "^3.9.2" -huggingface-hub = "^0.8.1" +huggingface-hub = "^0.11.0" diff --git a/e2e/tests/fixtures/hub.py b/e2e/tests/fixtures/hub.py index 7f560407..111580bf 100644 --- a/e2e/tests/fixtures/hub.py +++ b/e2e/tests/fixtures/hub.py @@ -12 +12 @@ import requests -from huggingface_hub.hf_api import ( # type: ignore +from huggingface_hub.hf_api import ( @@ -16 +16 @@ from huggingface_hub.hf_api import ( # type: ignore - _raise_for_status, + hf_raise_for_status, @@ -74,2 +73,0 @@ def update_repo_settings( - token, name = hf_api._validate_or_retrieve_token(token, name, function_name="update_repo_settings") - @@ -77 +75 @@ def update_repo_settings( - namespace = hf_api.whoami(token)["name"] + namespace = hf_api.whoami(token=token)["name"] @@ -98 +96 @@ def update_repo_settings( - _raise_for_status(r) + hf_raise_for_status(r) diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index be787642..797799c6 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -255 +255 @@ name = "huggingface-hub" -version = "0.8.1" +version = "0.11.0" @@ -269,0 +270,7 @@ typing-extensions = ">=3.7.4.3" +all = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", 
"types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["toml", "fastai (>=2.4)", "fastcore (>=1.3.27)"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["tensorflow", "pydot", "graphviz"] +testing = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile"] @@ -271,6 +278 @@ torch = ["torch"] -testing = ["soundfile", "datasets", "pytest-cov", "pytest"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -quality = ["flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (>=22.0,<23.0)"] -fastai = ["fastcore (>=1.3.27)", "fastai (>=2.4)", "toml"] -dev = ["flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (>=22.0,<23.0)", "soundfile", "datasets", "pytest-cov", "pytest"] -all = ["flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (>=22.0,<23.0)", "soundfile", "datasets", "pytest-cov", "pytest"] +typing = ["types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -857 +859 @@ python-versions = "3.9.6" -content-hash = "1c5f9363c3a770b15492b638f5099c237841884af6bb9209174fb7d46de3b82e" +content-hash = "10fcf72c819681e16af3961e2ace8a2631b251459b4662dc52dfe9b7ee394bc1" diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 889bbe3c..f2e466c8 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -10 +10 @@ environs = "^9.5.0" -huggingface-hub = "^0.8.1" +huggingface-hub = "^0.11.0" diff --git a/services/admin/src/admin/dataset.py b/services/admin/src/admin/dataset.py index 9654ea94..c34cc773 100644 --- a/services/admin/src/admin/dataset.py +++ b/services/admin/src/admin/dataset.py @@ -7,2 +7,2 @@ from typing import Optional -from huggingface_hub.hf_api import HfApi # type: ignore -from huggingface_hub.utils import RepositoryNotFoundError # type: ignore +from huggingface_hub.hf_api import HfApi +from huggingface_hub.utils import RepositoryNotFoundError diff --git a/services/admin/src/admin/scripts/refresh_cache.py b/services/admin/src/admin/scripts/refresh_cache.py index dd69d734..c1cc4aa3 100644 --- a/services/admin/src/admin/scripts/refresh_cache.py +++ b/services/admin/src/admin/scripts/refresh_cache.py @@ -7 +7 @@ from typing import List -from huggingface_hub.hf_api import HfApi # type: ignore +from huggingface_hub.hf_api import HfApi diff --git a/services/admin/src/admin/scripts/refresh_cache_canonical.py b/services/admin/src/admin/scripts/refresh_cache_canonical.py index d08d885c..70a58a12 100644 --- a/services/admin/src/admin/scripts/refresh_cache_canonical.py +++ b/services/admin/src/admin/scripts/refresh_cache_canonical.py @@ -6 +6 @@ import logging -from huggingface_hub.hf_api import HfApi # type: ignore +from huggingface_hub.hf_api import HfApi diff --git a/services/admin/tests/fixtures/hub.py b/services/admin/tests/fixtures/hub.py index dd960c1e..16395071 100644 --- a/services/admin/tests/fixtures/hub.py +++ b/services/admin/tests/fixtures/hub.py @@ -12 +12 @@ import requests -from huggingface_hub.hf_api import ( # type: ignore +from huggingface_hub.hf_api import ( @@ -16 +16 @@ from huggingface_hub.hf_api import ( # type: ignore - 
_raise_for_status, + hf_raise_for_status, @@ -70,2 +69,0 @@ def update_repo_settings( - token, name = hf_api._validate_or_retrieve_token(token, name, function_name="update_repo_settings") - @@ -73 +71 @@ def update_repo_settings( - namespace = hf_api.whoami(token)["name"] + namespace = hf_api.whoami(token=token)["name"] @@ -94 +92 @@ def update_repo_settings( - _raise_for_status(r) + hf_raise_for_status(r) diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 6e7efbe0..4b6763f8 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -255 +255 @@ name = "huggingface-hub" -version = "0.9.1" +version = "0.11.0" @@ -269,0 +270,7 @@ typing-extensions = ">=3.7.4.3" +all = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["toml", "fastai (>=2.4)", "fastcore (>=1.3.27)"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["tensorflow", "pydot", "graphviz"] +testing = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile"] @@ -271,6 +278 @@ torch = ["torch"] -testing = ["soundfile", "datasets", "pytest-cov", "pytest"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -quality = ["flake8-bugbear", "flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (==22.3)"] -fastai = ["fastcore (>=1.3.27)", "fastai (>=2.4)", "toml"] -dev = ["flake8-bugbear", "flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (==22.3)", "soundfile", "datasets", "pytest-cov", "pytest"] -all = ["flake8-bugbear", "flake8 (>=3.8.3)", "isort (>=5.5.4)", "black (==22.3)", "soundfile", "datasets", "pytest-cov", "pytest"] +typing = ["types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -899 +901 @@ python-versions = "3.9.6" -content-hash = "b6605d97e1dc8c57c112f61b61bb7aece4369555bb43995d5830daa41064b86b" +content-hash = "0388f148c99c5bb8ee39a81073ecf4b153321250eb1fb684aba1add0c854ac8b" diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 3db6460d..001949b6 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -9 +9,2 @@ license = "Apache-2.0" -huggingface-hub = "^0.9.1" +environs = "^9.5.0" +huggingface-hub = "^0.11.0" @@ -19 +19,0 @@ watchdog = { extras = ["watchmedo"], version = "^2.1.3" } -environs = "^9.5.0" diff --git a/services/api/src/api/dataset.py b/services/api/src/api/dataset.py index 7ba89ff3..ea08aac3 100644 --- a/services/api/src/api/dataset.py +++ b/services/api/src/api/dataset.py @@ -8,2 +8,2 @@ from typing import Optional -from huggingface_hub.hf_api import HfApi # type: ignore -from huggingface_hub.utils import RepositoryNotFoundError # type: ignore +from huggingface_hub.hf_api import HfApi +from huggingface_hub.utils import RepositoryNotFoundError diff --git a/services/api/tests/test_app.py b/services/api/tests/test_app.py index 6701d56c..49d41b96 100644 --- a/services/api/tests/test_app.py +++ 
b/services/api/tests/test_app.py @@ -117 +117 @@ def test_get_splits(client: TestClient) -> None: - ({}, 404, "SplitsResponseNotFound"), + ({}, 500, "SplitsResponseNotReady"), diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 1ec8181c..a08b28cc 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -351 +351 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -357 +357 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", 
"sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -690 +690 @@ name = "huggingface-hub" -version = "0.10.1" +version = "0.11.0" @@ -705 +705 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy"] +all = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -707 +707 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy"] +dev = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -709 +709 @@ fastai = ["toml", "fastai (>=2.4)", "fastcore (>=1.3.27)"] -quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] @@ -711 +711 @@ tensorflow = ["tensorflow", "pydot", "graphviz"] -testing = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile"] @@ -712,0 +713 @@ torch = ["torch"] +typing = ["types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -2288 +2289 @@ python-versions = "3.9.6" -content-hash = "7ad5f0d9ac949dab60378f6974037a6a2deed213755f83064b8450d2cea6d55f" +content-hash = "14a3bc11393fb1360f9ce68375795fdc65008e4815d7a7e620bf26fff7ab009b" diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index f21dee06..05395184 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -16,0 +17 @@ gdown = "^4.2.0" +huggingface-hub = "^0.11.0" diff --git a/workers/first_rows/src/first_rows/response.py b/workers/first_rows/src/first_rows/response.py index 9776355d..4b2726fb 100644 --- a/workers/first_rows/src/first_rows/response.py +++ b/workers/first_rows/src/first_rows/response.py @@ -18 +18 @@ from datasets.data_files import EmptyDatasetError as _EmptyDatasetError -from huggingface_hub.hf_api import HfApi, RepositoryNotFoundError # type: ignore +from huggingface_hub.hf_api import HfApi, RepositoryNotFoundError @@ -307 +306,0 @@ def get_dataset_git_revision( - use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False @@ -309 +308 @@ def get_dataset_git_revision( - dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) + dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token) diff --git a/workers/first_rows/tests/fixtures/hub.py b/workers/first_rows/tests/fixtures/hub.py index fd745c68..cb2e8374 100644 --- 
a/workers/first_rows/tests/fixtures/hub.py +++ b/workers/first_rows/tests/fixtures/hub.py @@ -15 +15 @@ from datasets import Dataset -from huggingface_hub.hf_api import ( # type: ignore +from huggingface_hub.hf_api import ( diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 63617fbf..389a4415 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -690 +690 @@ name = "huggingface-hub" -version = "0.10.1" +version = "0.11.0" @@ -705 +705 @@ typing-extensions = ">=3.7.4.3" -all = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy"] +all = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -707 +707 @@ cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy"] +dev = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "mypy (==0.982)", "types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -709 +709 @@ fastai = ["toml", "fastai (>=2.4)", "fastcore (>=1.3.27)"] -quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] @@ -711 +711 @@ tensorflow = ["tensorflow", "pydot", "graphviz"] -testing = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "isort (>=5.5.4)", "jedi", "jinja2", "pytest", "pytest-cov", "pytest-env", "soundfile"] @@ -712,0 +713 @@ torch = ["torch"] +typing = ["types-pyyaml", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -2288 +2289 @@ python-versions = "3.9.6" -content-hash = "7ad5f0d9ac949dab60378f6974037a6a2deed213755f83064b8450d2cea6d55f" +content-hash = "b47daad64be3fa9cc6433c97646a6df2193d0746216404ca26209b547f45ddcd" diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 7c24a63a..ce6f7ffe 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "~2.6.0" } +datasets = { extras = ["audio", "vision"], version = "~2.6.1" } @@ -16,0 +17 @@ gdown = "^4.2.0" +huggingface-hub = "^0.11.0" diff --git a/workers/splits/src/splits/response.py b/workers/splits/src/splits/response.py index 7b89616b..c223ec2c 100644 --- a/workers/splits/src/splits/response.py +++ b/workers/splits/src/splits/response.py @@ -14,2 +14,2 @@ from datasets.data_files import EmptyDatasetError as _EmptyDatasetError -from huggingface_hub.hf_api import HfApi # type: ignore -from huggingface_hub.utils import RepositoryNotFoundError # type: ignore +from huggingface_hub.hf_api import HfApi +from huggingface_hub.utils import RepositoryNotFoundError @@ -86 +85,0 @@ def get_dataset_git_revision( - use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False @@ -88 +87 @@ def get_dataset_git_revision( - dataset_info = 
HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) + dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token) diff --git a/workers/splits/tests/fixtures/hub.py b/workers/splits/tests/fixtures/hub.py index fb98f93c..c0c04ad8 100644 --- a/workers/splits/tests/fixtures/hub.py +++ b/workers/splits/tests/fixtures/hub.py @@ -15 +15 @@ from datasets import Dataset -from huggingface_hub.hf_api import ( # type: ignore +from huggingface_hub.hf_api import (
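The change above is driven by the huggingface-hub 0.10.1 → 0.11.0 upgrade: `HfApi.dataset_info` accepts the token directly through its `token` parameter, so the `use_auth_token` shim (which mapped a missing token to `False`) is dropped, and the `# type: ignore` comments go away because the package now ships type information. A minimal sketch of the resulting call site, assuming a `get_dataset_git_revision`-style helper that returns the repository's `sha` (the full function bodies are not shown in the diff, so the error handling here is illustrative):

```python
from typing import Optional

from huggingface_hub.hf_api import HfApi
from huggingface_hub.utils import RepositoryNotFoundError


def get_dataset_git_revision(dataset: str, hf_endpoint: str, hf_token: Optional[str] = None) -> Optional[str]:
    # huggingface-hub 0.11 takes the token (or None) directly; the
    # `use_auth_token = hf_token if hf_token is not None else False` shim is gone.
    try:
        dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, token=hf_token)
    except RepositoryNotFoundError:
        return None  # illustrative: the real helper may raise instead
    return dataset_info.sha
```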
1a48e05b87b0b00e64d123236b4ea6b488711a2e
Sylvain Lesage
2022-11-16T14:36:59
Force job (#642)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index eaf106a8..093e67b8 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5 +5 @@ - "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-a514e54" + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-61c45d0" @@ -8,2 +8,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-a9824d5", - "api": "huggingface/datasets-server-services-api:sha-a9824d5" + "admin": "huggingface/datasets-server-services-admin:sha-a1de302", + "api": "huggingface/datasets-server-services-api:sha-61c45d0" @@ -12,2 +12,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-a9824d5", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-450cc91" + "splits": "huggingface/datasets-server-workers-splits:sha-61c45d0", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-61c45d0" diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index d3360a0a..5436af28 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -272 +272 @@ name = "libqueue" -version = "0.4.8" +version = "0.4.10" @@ -288 +288 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl" @@ -677 +677 @@ python-versions = "3.9.6" -content-hash = "1406e01f36b3a97a1ac5a6e5ede76e0a63583893e59435680b678d629e8ffb46" +content-hash = "27f59b12b7c1b7cc620d908dd09859ce5c59393bac5a13587773838bf49f4a99" @@ -730 +730 @@ libqueue = [ - {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, + {file = "libqueue-0.4.10-py3-none-any.whl", hash = "sha256:38d15ed5797cf2e90d11e09e2570e4430ee774496d2bb54f19530b5466f33690"}, diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index b4670064..2aa153e6 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -12 +12 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl", develop = false } diff --git a/jobs/mongodb_migration/src/mongodb_migration/collector.py b/jobs/mongodb_migration/src/mongodb_migration/collector.py index 49463307..b7091191 100644 --- a/jobs/mongodb_migration/src/mongodb_migration/collector.py +++ b/jobs/mongodb_migration/src/mongodb_migration/collector.py @@ -7,0 +8,3 @@ from mongodb_migration.migrations._20221110230400_example import MigrationExampl +from mongodb_migration.migrations._20221116133500_queue_job_add_force import ( + MigrationAddForceToJob, +) @@ -14,0 +18,3 @@ class MigrationsCollector: + MigrationAddForceToJob( + version="20221116133500", description="add 'force' field to jobs in queue database" + ), diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py new file mode 100644 index 00000000..8fb8fe0e --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221116133500_queue_job_add_force.py @@ -0,0 +1,116 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import enum +import logging +import types +from typing import Generic, Type, TypeVar + +from mongoengine import Document +from mongoengine.connection import get_db +from mongoengine.fields import BooleanField, DateTimeField, EnumField, StringField +from mongoengine.queryset.queryset import QuerySet + +from mongodb_migration.check import check_documents +from mongodb_migration.migration import Migration + + +# connection already occurred in the main.py (caveat: we use globals) +class MigrationAddForceToJob(Migration): + def up(self) -> None: + # See https://docs.mongoengine.org/guide/migration.html#example-1-addition-of-a-field + logging.info("Add the force field, with the default value (False), to all the jobs") + db = get_db("queue") + db["jobsBlue"].update_many({}, {"$set": {"force": False}}) + + def down(self) -> None: + logging.info("Remove the force field from all the jobs") + db = get_db("queue") + db["jobsBlue"].update_many({}, {"$unset": {"force": ""}}) + + def validate(self) -> None: + logging.info("Ensure that a random selection of jobs have the 'force' field set to False") + + def custom_validation(doc: JobSnapshot) -> None: + if doc.force: + raise ValueError("force should be False") + + check_documents(DocCls=JobSnapshot, sample_size=10, custom_validation=custom_validation) + if JobSnapshot.objects(force=False).count() != JobSnapshot.objects.count(): + raise ValueError('All the objects should have the "force" field, set to False') + + +# --- JobSnapshot --- +# copied from libqueue.queue.Job, as a snapshot of when the migration was created +class Status(enum.Enum): + WAITING = "waiting" + STARTED = "started" + SUCCESS = "success" + ERROR = "error" + CANCELLED = "cancelled" + SKIPPED = "skipped" + + +# START monkey patching ### hack ### +# see https://github.com/sbdchd/mongo-types#install +U = TypeVar("U", bound=Document) + + +def no_op(self, x): # type: ignore + return self + + +QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet) + + +class QuerySetManager(Generic[U]): + def __get__(self, instance: object, cls: Type[U]) -> QuerySet[U]: + return QuerySet(cls, cls._get_collection()) + + +# END monkey patching ### hack ### + + +class JobSnapshot(Document): + """A job in the mongoDB database + + Args: + type (`str`): The type of the job, identifies the queue + dataset (`str`): The dataset on which to apply the job. + config (`str`, optional): The config on which to apply the job. + split (`str`, optional): The split on which to apply the job. + unicity_id (`str`): A string that identifies the job uniquely. Only one job with the same unicity_id can be in + the started state. + namespace (`str`): The dataset namespace (user or organization) if any, else the dataset name (canonical name). + force (`bool`, optional): If True, the job SHOULD not be skipped. Defaults to False. + status (`Status`, optional): The status of the job. Defaults to Status.WAITING. + created_at (`datetime`): The creation date of the job. + started_at (`datetime`, optional): When the job has started. + finished_at (`datetime`, optional): When the job has finished.
+ """ + + meta = { + "collection": "jobsBlue", + "db_alias": "queue", + "indexes": [ + "status", + ("type", "status"), + ("type", "dataset", "status"), + ("type", "dataset", "config", "split", "status"), + ("status", "type", "created_at", "namespace"), + "-created_at", + ], + } + type = StringField(required=True) + dataset = StringField(required=True) + config = StringField() + split = StringField() + unicity_id = StringField(required=True) + namespace = StringField(required=True) + force = BooleanField(default=False) + status = EnumField(Status, default=Status.WAITING) + created_at = DateTimeField(required=True) + started_at = DateTimeField() + finished_at = DateTimeField() + + objects = QuerySetManager["JobSnapshot"]() diff --git a/jobs/mongodb_migration/tests/test_collector.py b/jobs/mongodb_migration/tests/test_collector.py index 2c425a0a..ae651857 100644 --- a/jobs/mongodb_migration/tests/test_collector.py +++ b/jobs/mongodb_migration/tests/test_collector.py @@ -10 +10 @@ def test_collector(): - assert len(migrations) == 1 + assert len(migrations) >= 1 diff --git a/libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl new file mode 100644 index 00000000..a0de00e7 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.10.tar.gz b/libs/libqueue/dist/libqueue-0.4.10.tar.gz new file mode 100644 index 00000000..a2ea9a22 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.10.tar.gz differ diff --git a/libs/libqueue/dist/libqueue-0.4.9-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.9-py3-none-any.whl new file mode 100644 index 00000000..014af2e9 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.9-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.9.tar.gz b/libs/libqueue/dist/libqueue-0.4.9.tar.gz new file mode 100644 index 00000000..f5ad48d6 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.9.tar.gz differ diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index ebd57ae5..c95ddcf1 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -5 +5 @@ name = "libqueue" -version = "0.4.8" +version = "0.4.10" diff --git a/libs/libqueue/src/libqueue/queue.py b/libs/libqueue/src/libqueue/queue.py index c7a8e501..401e200e 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libqueue/src/libqueue/queue.py @@ -11 +11 @@ from operator import itemgetter -from typing import Generic, List, Literal, Optional, Tuple, Type, TypedDict, TypeVar +from typing import Generic, List, Literal, Optional, Type, TypedDict, TypeVar @@ -14 +14 @@ from mongoengine import Document, DoesNotExist, connect -from mongoengine.fields import DateTimeField, EnumField, StringField +from mongoengine.fields import BooleanField, DateTimeField, EnumField, StringField @@ -52,0 +53 @@ class JobDict(TypedDict): + force: bool @@ -58,0 +60,8 @@ class JobDict(TypedDict): +class StartedJobInfo(TypedDict): + job_id: str + dataset: str + config: Optional[str] + split: Optional[str] + force: bool + + @@ -101,0 +111 @@ class Job(Document): + force (`bool`, optional): If True, the job SHOULD not be skipped. Defaults to False. 
@@ -126,0 +136 @@ class Job(Document): + force = BooleanField(default=False) @@ -139,0 +150 @@ class Job(Document): + "force": self.force, @@ -178 +189,3 @@ class Queue: - def add_job(self, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> Job: + def add_job( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> Job: @@ -180,0 +194,6 @@ class Queue: + Args: + dataset (`str`): The dataset on which to apply the job. + config (`str`, optional): The config on which to apply the job. + split (`str`, optional): The split on which to apply the job. + force (`bool`, optional): If True, the job SHOULD not be skipped. Defaults to False. + @@ -189,0 +209 @@ class Queue: + force=force, @@ -218 +238 @@ class Queue: - .only("dataset", "config", "split") + .only("dataset", "config", "split", "force") @@ -253 +273 @@ class Queue: - .only("dataset", "config", "split") + .only("dataset", "config", "split", "force") @@ -263 +283 @@ class Queue: - def start_job(self) -> Tuple[str, str, Optional[str], Optional[str]]: + def start_job(self) -> StartedJobInfo: @@ -277,2 +297,7 @@ class Queue: - return str(next_waiting_job.pk), next_waiting_job.dataset, next_waiting_job.config, next_waiting_job.split - # ^ job.pk is the id. job.id is not recognized by mypy + return { + "job_id": str(next_waiting_job.pk), # job.pk is the id. job.id is not recognized by mypy + "dataset": next_waiting_job.dataset, + "config": next_waiting_job.config, + "split": next_waiting_job.split, + "force": next_waiting_job.force, + } diff --git a/libs/libqueue/src/libqueue/worker.py b/libs/libqueue/src/libqueue/worker.py index 0c9c3d0c..4154132a 100644 --- a/libs/libqueue/src/libqueue/worker.py +++ b/libs/libqueue/src/libqueue/worker.py @@ -83 +83,6 @@ class Worker(ABC): - job_id, dataset, config, split = self.queue.start_job() + started_job_info = self.queue.start_job() + job_id = started_job_info["job_id"] + dataset = started_job_info["dataset"] + config = started_job_info["config"] + split = started_job_info["split"] + force = started_job_info["force"] @@ -95 +100 @@ class Worker(ABC): - if self.should_skip_job(dataset=dataset, config=config, split=split) + if self.should_skip_job(dataset=dataset, config=config, split=split, force=force) @@ -97,5 +102 @@ class Worker(ABC): - if self.compute( - dataset=dataset, - config=config, - split=split, - ) + if self.compute(dataset=dataset, config=config, split=split, force=force) @@ -135,0 +137 @@ class Worker(ABC): + force: bool = False, @@ -144,0 +147 @@ class Worker(ABC): + force: bool = False, diff --git a/libs/libqueue/tests/test_queue.py b/libs/libqueue/tests/test_queue.py index 4378402c..26d771ed 100644 --- a/libs/libqueue/tests/test_queue.py +++ b/libs/libqueue/tests/test_queue.py @@ -22 +22 @@ def test_add_job() -> None: - queue.add_job(dataset=test_dataset) + queue.add_job(dataset=test_dataset, force=True) @@ -27,4 +27,5 @@ def test_add_job() -> None: - job_id, dataset, config, split = queue.start_job() - assert dataset == test_dataset - assert config is None - assert split is None + started_job_info = queue.start_job() + assert started_job_info["dataset"] == test_dataset + assert started_job_info["config"] is None + assert started_job_info["split"] is None + assert started_job_info["force"] is True @@ -34 +35 @@ def test_add_job() -> None: - queue.add_job(dataset=test_dataset) + queue.add_job(dataset=test_dataset, force=True) @@ -39 +40 @@ def test_add_job() -> None: -
queue.finish_job(job_id=job_id, finished_status=Status.SUCCESS) + queue.finish_job(job_id=started_job_info["job_id"], finished_status=Status.SUCCESS) @@ -43,2 +44,3 @@ def test_add_job() -> None: - job_id, *_ = queue.start_job() - queue.finish_job(job_id=job_id, finished_status=Status.SUCCESS) + started_job_info = queue.start_job() + assert started_job_info["force"] is False + queue.finish_job(job_id=started_job_info["job_id"], finished_status=Status.SUCCESS) @@ -46,2 +48,3 @@ def test_add_job() -> None: - job_id, *_ = queue.start_job() - other_job_id = ("1" if job_id[0] == "0" else "0") + job_id[1:] + started_job_info = queue.start_job() + assert started_job_info["force"] is True + other_job_id = ("1" if started_job_info["job_id"][0] == "0" else "0") + started_job_info["job_id"][1:] @@ -51 +54 @@ def test_add_job() -> None: - queue.finish_job(job_id=job_id, finished_status=Status.SUCCESS) + queue.finish_job(job_id=started_job_info["job_id"], finished_status=Status.SUCCESS) @@ -69,18 +72,18 @@ def test_priority_to_non_started_datasets() -> None: - _, dataset, __, split = queue.start_job() - assert dataset == "dataset1" - assert split == "split1" - _, dataset, __, split = queue.start_job() - assert dataset == "dataset2" - assert split == "split1" - _, dataset, __, split = queue.start_job() - assert dataset == "dataset3" - assert split == "split1" - _, dataset, __, split = queue.start_job() - assert dataset == "dataset1/dataset" - assert split == "split1" - _, dataset, __, split = queue.start_job() - assert dataset == "dataset2" - assert split == "split2" - _, dataset, __, split = queue.start_job() - assert dataset == "dataset1" - assert split == "split2" + started_job_info = queue.start_job() + assert started_job_info["dataset"] == "dataset1" + assert started_job_info["split"] == "split1" + started_job_info = queue.start_job() + assert started_job_info["dataset"] == "dataset2" + assert started_job_info["split"] == "split1" + started_job_info = queue.start_job() + assert started_job_info["dataset"] == "dataset3" + assert started_job_info["split"] == "split1" + started_job_info = queue.start_job() + assert started_job_info["dataset"] == "dataset1/dataset" + assert started_job_info["split"] == "split1" + started_job_info = queue.start_job() + assert started_job_info["dataset"] == "dataset2" + assert started_job_info["split"] == "split2" + started_job_info = queue.start_job() + assert started_job_info["dataset"] == "dataset1" + assert started_job_info["split"] == "split2" @@ -104,4 +107,4 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - job_id, dataset, config, split = queue.start_job() - assert dataset == test_dataset - assert config == test_config - assert split == "split1" + started_job_info = queue.start_job() + assert started_job_info["dataset"] == test_dataset + assert started_job_info["config"] == test_config + assert started_job_info["split"] == "split1" @@ -113,2 +116,2 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - _, dataset, config, split = queue.start_job() - assert split == "split2" + started_job_info_2 = queue.start_job() + assert started_job_info_2["split"] == "split2" @@ -121 +124 @@ def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: - queue.finish_job(job_id, finished_status=Status.SUCCESS) + queue.finish_job(started_job_info["job_id"], finished_status=Status.SUCCESS) diff --git a/libs/libqueue/tests/utils.py b/libs/libqueue/tests/utils.py index dc79e080..46298eb2 100644 --- 
a/libs/libqueue/tests/utils.py +++ b/libs/libqueue/tests/utils.py @@ -16,2 +16,4 @@ class DummyWorker(Worker): - def should_skip_job(self, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> bool: - return super().should_skip_job(dataset, config, split) + def should_skip_job( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> bool: + return super().should_skip_job(dataset=dataset, config=config, split=split, force=force) @@ -20,4 +22 @@ class DummyWorker(Worker): - self, - dataset: str, - config: Optional[str] = None, - split: Optional[str] = None, + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False @@ -25 +24 @@ class DummyWorker(Worker): - return super().compute(dataset, config, split) + return super().compute(dataset=dataset, config=config, split=split, force=force) diff --git a/services/admin/README.md b/services/admin/README.md index a57ed16a..44310e50 100644 --- a/services/admin/README.md +++ b/services/admin/README.md @@ -48 +48,4 @@ The admin service provides endpoints: -- `/cache-reports`: give detailed reports on the content of the cache +- `/cache-reports`: give detailed reports on the content of the cache: + - `/cache-reports/features` + - `/cache-reports/first-rows` + - `/cache-reports/splits` @@ -49,0 +53,3 @@ The admin service provides endpoints: +- `/force-refresh`: force refresh cache entries. It's a POST endpoint: + - `/force-refresh/splits?dataset={dataset}` + - `/force-refresh/first-rows?dataset={dataset}&config={config}&split={split}` diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index bd4bcde5..be787642 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -345 +345 @@ name = "libqueue" -version = "0.4.8" +version = "0.4.10" @@ -361 +361 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl" @@ -514 +514 @@ name = "psutil" -version = "5.9.2" +version = "5.9.4" @@ -857 +857 @@ python-versions = "3.9.6" -content-hash = "6de5688374b0debaceff7658a2a4ec3f6cf0b780aba68a68a0034cc3250725ef" +content-hash = "1c5f9363c3a770b15492b638f5099c237841884af6bb9209174fb7d46de3b82e" @@ -924 +924 @@ libqueue = [ - {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, + {file = "libqueue-0.4.10-py3-none-any.whl", hash = "sha256:38d15ed5797cf2e90d11e09e2570e4430ee774496d2bb54f19530b5466f33690"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 843fe201..889bbe3c 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 058ed184..da98937b 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -14,0 +15,4 @@ from admin.routes.cache_reports import create_cache_reports_endpoint +from admin.routes.force_refresh_first_rows import ( + create_force_refresh_first_rows_endpoint, +) +from admin.routes.force_refresh_splits import create_force_refresh_splits_endpoint @@ -32,0 +37,20 @@ def create_app() -> 
Starlette: + Route( + "/force-refresh/first-rows", + endpoint=create_force_refresh_first_rows_endpoint( + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), + methods=["POST"], + ), + Route( + "/force-refresh/splits", + endpoint=create_force_refresh_splits_endpoint( + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), + methods=["POST"], + ), diff --git a/services/admin/src/admin/dataset.py b/services/admin/src/admin/dataset.py new file mode 100644 index 00000000..9654ea94 --- /dev/null +++ b/services/admin/src/admin/dataset.py @@ -0,0 +1,57 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +from typing import Optional + +from huggingface_hub.hf_api import HfApi # type: ignore +from huggingface_hub.utils import RepositoryNotFoundError # type: ignore +from libcache.simple_cache import ( + mark_first_rows_responses_as_stale, + mark_splits_responses_as_stale, +) +from libqueue.queue import Queue + +from admin.utils import JobType + +splits_queue = Queue(type=JobType.SPLITS.value) +first_rows_queue = Queue(type=JobType.FIRST_ROWS.value) + + +def is_supported( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, +) -> bool: + """ + Check if the dataset exists on the Hub and is supported by the datasets-server. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + [`bool`]: True if the dataset is supported by the datasets-server. + """ + try: + # note that token is required to access gated dataset info + info = HfApi(endpoint=hf_endpoint).dataset_info(dataset, token=hf_token) + except RepositoryNotFoundError: + return False + return info.private is False + + +def update_splits(dataset: str, force: bool = False) -> None: + logging.debug(f"refresh /splits for {dataset}") + mark_splits_responses_as_stale(dataset_name=dataset) + mark_first_rows_responses_as_stale(dataset_name=dataset) + splits_queue.add_job(dataset=dataset, force=force) + + +def update_first_rows(dataset: str, config: str, split: str, force: bool = False) -> None: + logging.debug(f"refresh /first-rows for {dataset}, {config}, {split}") + mark_first_rows_responses_as_stale(dataset_name=dataset, config_name=config, split_name=split) + first_rows_queue.add_job(dataset=dataset, config=config, split=split, force=force) diff --git a/services/admin/src/admin/routes/force_refresh_first_rows.py b/services/admin/src/admin/routes/force_refresh_first_rows.py new file mode 100644 index 00000000..eed967cd --- /dev/null +++ b/services/admin/src/admin/routes/force_refresh_first_rows.py @@ -0,0 +1,53 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import logging +from typing import Optional + +from starlette.requests import Request +from starlette.responses import Response + +from admin.authentication import auth_check +from admin.dataset import is_supported, update_first_rows +from admin.utils import ( + AdminCustomError, + Endpoint, + MissingRequiredParameterError, + UnexpectedError, + UnsupportedDatasetError, + are_valid_parameters, + get_json_admin_error_response, + get_json_ok_response, +) + + +def create_force_refresh_first_rows_endpoint( + hf_endpoint: str, + hf_token: Optional[str] = None, + external_auth_url: Optional[str] = None, + organization: Optional[str] = None, +) -> Endpoint: + async def force_refresh_first_rows_endpoint(request: Request) -> Response: + try: + dataset = request.query_params.get("dataset") + config = request.query_params.get("config") + split = request.query_params.get("split") + logging.info(f"/force-refresh/first-rows, dataset={dataset}, config={config}, split={split}") + + if not are_valid_parameters([dataset, config, split]): + raise MissingRequiredParameterError("Parameters 'dataset', 'config' and 'split' are required") + # if auth_check fails, it will raise an exception that will be caught below + auth_check(external_auth_url=external_auth_url, request=request, organization=organization) + if not is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): + raise UnsupportedDatasetError(f"Dataset '{dataset}' is not supported.") + update_first_rows(dataset=dataset, config=config, split=split, force=True) + return get_json_ok_response( + {"status": "ok"}, + max_age=0, + ) + except AdminCustomError as e: + return get_json_admin_error_response(e, max_age=0) + except Exception: + return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=0) + + return force_refresh_first_rows_endpoint diff --git a/services/admin/src/admin/routes/force_refresh_splits.py b/services/admin/src/admin/routes/force_refresh_splits.py new file mode 100644 index 00000000..6f642b47 --- /dev/null +++ b/services/admin/src/admin/routes/force_refresh_splits.py @@ -0,0 +1,51 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import logging +from typing import Optional + +from starlette.requests import Request +from starlette.responses import Response + +from admin.authentication import auth_check +from admin.dataset import is_supported, update_splits +from admin.utils import ( + AdminCustomError, + Endpoint, + MissingRequiredParameterError, + UnexpectedError, + UnsupportedDatasetError, + are_valid_parameters, + get_json_admin_error_response, + get_json_ok_response, +) + + +def create_force_refresh_splits_endpoint( + hf_endpoint: str, + hf_token: Optional[str] = None, + external_auth_url: Optional[str] = None, + organization: Optional[str] = None, +) -> Endpoint: + async def force_refresh_splits_endpoint(request: Request) -> Response: + try: + dataset = request.query_params.get("dataset") + logging.info(f"/force-refresh/splits, dataset={dataset}") + + if not are_valid_parameters([dataset]): + raise MissingRequiredParameterError("Parameter 'dataset' is required") + # if auth_check fails, it will raise an exception that will be caught below + auth_check(external_auth_url=external_auth_url, request=request, organization=organization) + if not is_supported(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token): + raise UnsupportedDatasetError(f"Dataset '{dataset}' is not supported.") + update_splits(dataset=dataset, force=True) + return get_json_ok_response( + {"status": "ok"}, + max_age=0, + ) + except AdminCustomError as e: + return get_json_admin_error_response(e, max_age=0) + except Exception: + return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=0) + + return force_refresh_splits_endpoint diff --git a/services/admin/src/admin/utils.py b/services/admin/src/admin/utils.py index c9e7a058..787e4065 100644 --- a/services/admin/src/admin/utils.py +++ b/services/admin/src/admin/utils.py @@ -6 +6 @@ from http import HTTPStatus -from typing import Any, Callable, Coroutine, Literal, Optional +from typing import Any, Callable, Coroutine, List, Literal, Optional @@ -14 +14,6 @@ AdminErrorCode = Literal[ - "InvalidParameter", "UnexpectedError", "ExternalUnauthenticatedError", "ExternalAuthenticatedError" + "MissingRequiredParameter", + "InvalidParameter", + "UnsupportedDatasetError", + "UnexpectedError", + "ExternalUnauthenticatedError", + "ExternalAuthenticatedError", @@ -31,0 +37,7 @@ class AdminCustomError(CustomError): +class MissingRequiredParameterError(AdminCustomError): + """Raised when a required parameter is missing.""" + + def __init__(self, message: str): + super().__init__(message, HTTPStatus.UNPROCESSABLE_ENTITY, "MissingRequiredParameter") + + @@ -38,0 +51,7 @@ class InvalidParameterError(AdminCustomError): +class UnsupportedDatasetError(AdminCustomError): + """Raised when a dataset is not supported (private dataset, for example).""" + + def __init__(self, message: str): + super().__init__(message, HTTPStatus.NOT_IMPLEMENTED, "UnsupportedDatasetError") + + @@ -94,0 +114,8 @@ def get_json_admin_error_response(error: AdminCustomError, max_age: int) -> Resp +def is_non_empty_string(string: Any) -> bool: + return isinstance(string, str) and bool(string and string.strip()) + + +def are_valid_parameters(parameters: List[Any]) -> bool: + return all(is_non_empty_string(s) for s in parameters) + + diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 182d1920..6e7efbe0 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -361 +361 @@ name = "libqueue" -version = "0.4.8" +version = "0.4.10" @@ -377 +377 @@ type = "file" -url = 
"../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl" @@ -899 +899 @@ python-versions = "3.9.6" -content-hash = "5c145b2208384d3c4c41fcf1e88adc2c189c7bc16b5622427c6fc87fa81169aa" +content-hash = "b6605d97e1dc8c57c112f61b61bb7aece4369555bb43995d5830daa41064b86b" @@ -964 +964 @@ libqueue = [ - {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, + {file = "libqueue-0.4.10-py3-none-any.whl", hash = "sha256:38d15ed5797cf2e90d11e09e2570e4430ee774496d2bb54f19530b5466f33690"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 08bd47c5..3db6460d 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl", develop = false } diff --git a/services/api/src/api/dataset.py b/services/api/src/api/dataset.py index 02ef31bb..7ba89ff3 100644 --- a/services/api/src/api/dataset.py +++ b/services/api/src/api/dataset.py @@ -22,0 +23 @@ splits_queue = Queue(type=JobType.SPLITS.value) +first_rows_queue = Queue(type=JobType.FIRST_ROWS.value) @@ -51 +52 @@ def is_supported( -def update(dataset: str) -> None: +def update(dataset: str, force: bool = False) -> None: @@ -55 +56 @@ def update(dataset: str) -> None: - splits_queue.add_job(dataset=dataset) + splits_queue.add_job(dataset=dataset, force=force) @@ -69,0 +71 @@ def is_splits_in_process( + # the /splits response is not ready yet @@ -72 +74,2 @@ def is_splits_in_process( - update(dataset=dataset) + # the dataset is supported, let's refresh it + update(dataset=dataset, force=False) @@ -80 +83,2 @@ def is_first_rows_in_process( - if splits_queue.is_job_in_process(dataset=dataset, config=config, split=split): + if first_rows_queue.is_job_in_process(dataset=dataset, config=config, split=split): + # the /first-rows response is not ready yet @@ -85,0 +90 @@ def is_first_rows_in_process( + # the /splits response is not ready yet @@ -94,0 +100 @@ def is_first_rows_in_process( + # Note that we "force" the refresh @@ -98 +104 @@ def is_first_rows_in_process( - update(dataset=dataset) + update(dataset=dataset, force=True) diff --git a/services/api/src/api/routes/webhook.py b/services/api/src/api/routes/webhook.py index 416149b3..628d4d57 100644 --- a/services/api/src/api/routes/webhook.py +++ b/services/api/src/api/routes/webhook.py @@ -66 +66 @@ def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: O - update(dataset=dataset) + update(dataset=dataset, force=False) @@ -75 +75 @@ def process_payload(payload: MoonWebhookV2Payload, hf_endpoint: str, hf_token: O - update(dataset=moved_to) + update(dataset=moved_to, force=False) diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index aad63417..1ec8181c 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -869 +869 @@ name = "libqueue" -version = "0.4.8" +version = "0.4.10" @@ -885 +885 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl" @@ -1319 +1319 @@ name = "psutil" -version = "5.9.3" +version = "5.9.4" @@ -2288 +2288 @@ python-versions = "3.9.6" -content-hash = 
"dddb081818eeb960f20dcdfeb38b8b7ab5c5d5c466e07e6ff3bce681357f661b" +content-hash = "7ad5f0d9ac949dab60378f6974037a6a2deed213755f83064b8450d2cea6d55f" @@ -2625 +2625 @@ libqueue = [ - {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, + {file = "libqueue-0.4.10-py3-none-any.whl", hash = "sha256:38d15ed5797cf2e90d11e09e2570e4430ee774496d2bb54f19530b5466f33690"}, diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index bdc6abfa..f21dee06 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "^2.6.0" } +datasets = { extras = ["audio", "vision"], version = "~2.6.0" } @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/first_rows/src/first_rows/worker.py index 7db7001c..bff0214c 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/first_rows/src/first_rows/worker.py @@ -36 +36,3 @@ class FirstRowsWorker(Worker): - def should_skip_job(self, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> bool: + def should_skip_job( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> bool: @@ -40 +42,2 @@ class FirstRowsWorker(Worker): - - a cache entry exists for the dataset + - force is False + - and a cache entry exists for the dataset @@ -48,0 +52 @@ class FirstRowsWorker(Worker): + force (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to force the job to be run. 
@@ -53 +57 @@ class FirstRowsWorker(Worker): - if config is None or split is None: + if force or config is None or split is None: @@ -74,0 +79 @@ class FirstRowsWorker(Worker): + force: bool = False, diff --git a/workers/first_rows/tests/test_worker.py b/workers/first_rows/tests/test_worker.py index ef6ef938..78475bfb 100644 --- a/workers/first_rows/tests/test_worker.py +++ b/workers/first_rows/tests/test_worker.py @@ -29,0 +30,9 @@ def worker(worker_config: WorkerConfig) -> FirstRowsWorker: +def should_skip_job(worker: FirstRowsWorker, hub_public_csv: str) -> None: + dataset, config, split = get_default_config_split(hub_public_csv) + assert worker.should_skip_job(dataset=dataset, config=config, split=split) is False + # we add an entry to the cache + worker.compute(dataset=dataset, config=config, split=split) + assert worker.should_skip_job(dataset=dataset, config=config, split=split) is True + assert worker.should_skip_job(dataset=dataset, config=config, split=split, force=True) is False + + diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index aad63417..63617fbf 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -351 +351 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -357 +357 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text",
"nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -869 +869 @@ name = "libqueue" -version = "0.4.8" +version = "0.4.10" @@ -885 +885 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl" @@ -1319 +1319 @@ name = "psutil" -version = "5.9.3" +version = "5.9.4" @@ -2288 +2288 @@ python-versions = "3.9.6" -content-hash = "dddb081818eeb960f20dcdfeb38b8b7ab5c5d5c466e07e6ff3bce681357f661b" +content-hash = "7ad5f0d9ac949dab60378f6974037a6a2deed213755f83064b8450d2cea6d55f" @@ -2625 +2625 @@ libqueue = [ - {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, + {file = "libqueue-0.4.10-py3-none-any.whl", hash = "sha256:38d15ed5797cf2e90d11e09e2570e4430ee774496d2bb54f19530b5466f33690"}, diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index b608dd18..7c24a63a 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -15 +15 @@ conllu = "^4.4.1" -datasets = { extras = ["audio", "vision"], version = "^2.6.0" } +datasets = { extras = ["audio", "vision"], version = "~2.6.0" } @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.10-py3-none-any.whl", develop = false } diff --git a/workers/splits/src/splits/worker.py b/workers/splits/src/splits/worker.py index 54f3d252..4674f4b5 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/splits/src/splits/worker.py @@ -39 +39,3 @@ class SplitsWorker(Worker): - def should_skip_job(self, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> bool: + def should_skip_job( + self, dataset: str, config: Optional[str] = None, split: Optional[str] = None, force: bool = False + ) -> bool: @@ -43 +45,2 @@ class SplitsWorker(Worker): - - a cache entry exists for the dataset + - force is False + - and a cache entry exists for the dataset @@ -51,0 +55 @@ class SplitsWorker(Worker): + force (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to force the job to be run. 
@@ -55,0 +60,2 @@ class SplitsWorker(Worker): + if force: + return False @@ -75,0 +82 @@ class SplitsWorker(Worker): + force: bool = False, @@ -101 +108,2 @@ class SplitsWorker(Worker): - self._queues.first_rows.add_job(dataset=d, config=c, split=s) + # we force the refresh of the /first_rows responses if the /splits refresh was forced + self._queues.first_rows.add_job(dataset=d, config=c, split=s, force=force) diff --git a/workers/splits/tests/test_worker.py b/workers/splits/tests/test_worker.py index 7bab4d15..f8e35865 100644 --- a/workers/splits/tests/test_worker.py +++ b/workers/splits/tests/test_worker.py @@ -38,0 +39 @@ def should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: + assert worker.should_skip_job(dataset=dataset, force=True) is False
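Taken together, the commit above threads a single `force` boolean end to end: the admin endpoints call `add_job(..., force=True)`, the migration backfills `force: False` on existing job documents, `Queue.start_job` returns the flag inside the new `StartedJobInfo` dict, and the workers' `should_skip_job` stops short-circuiting on an existing cache entry whenever the flag is set. A condensed, runnable sketch of that flow (standalone stand-ins that mirror the names in the diff, not the real `libqueue` classes):

```python
from typing import Optional, TypedDict


class StartedJobInfo(TypedDict):
    # Mirrors the dict that Queue.start_job() now returns instead of a 4-tuple.
    job_id: str
    dataset: str
    config: Optional[str]
    split: Optional[str]
    force: bool


def should_skip_job(cache_entry_exists: bool, force: bool = False) -> bool:
    # A job is skipped only when it was NOT forced and a cache entry already exists.
    return not force and cache_entry_exists


def process(info: StartedJobInfo, cache_entry_exists: bool) -> str:
    # Worker.process unpacks the dict and threads `force` through to the skip check.
    if should_skip_job(cache_entry_exists, force=info["force"]):
        return "skipped"
    return "computed"


normal = StartedJobInfo(job_id="1", dataset="d", config=None, split=None, force=False)
forced = StartedJobInfo(job_id="2", dataset="d", config=None, split=None, force=True)
assert process(normal, cache_entry_exists=True) == "skipped"
assert process(forced, cache_entry_exists=True) == "computed"
```

Returning a `TypedDict` rather than a positional tuple is what lets `force` ride along without touching every unpacking call site again, which is exactly the churn visible in `test_queue.py` above.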
3faebee1d14e5d32e65e516a81d397f672598e1c
Mishig
2022-11-16T10:44:53
Revert "Update pr docs actions (#632)" (#641)
diff --git a/.github/workflows/doc-pr-build.yml b/.github/workflows/doc-pr-build.yml index fe01c960..f962ada5 100644 --- a/.github/workflows/doc-pr-build.yml +++ b/.github/workflows/doc-pr-build.yml @@ -18 +18 @@ jobs: - uses: huggingface/doc-builder/.github/workflows/build_pr_documentation.yml@use_hf_hub + uses: huggingface/doc-builder/.github/workflows/build_pr_documentation.yml@main @@ -25,2 +25 @@ jobs: - token: ${{ secrets.HF_DOC_PUSH }} - comment_bot_token: ${{ secrets.HUGGINGFACE_PUSH }} + token: ${{ secrets.HUGGINGFACE_PUSH }} diff --git a/.github/workflows/doc-pr-delete.yml b/.github/workflows/doc-pr-delete.yml index b2426810..a4a39a4a 100644 --- a/.github/workflows/doc-pr-delete.yml +++ b/.github/workflows/doc-pr-delete.yml @@ -13 +13 @@ jobs: - uses: huggingface/doc-builder/.github/workflows/delete_doc_comment.yml@use_hf_hub + uses: huggingface/doc-builder/.github/workflows/delete_doc_comment.yml@main @@ -18,3 +18 @@ jobs: - token: ${{ secrets.HF_DOC_PUSH }} - comment_bot_token: ${{ secrets.HUGGINGFACE_PUSH }} - + token: ${{ secrets.HUGGINGFACE_PUSH }}
ef930e85f3702c0c110e13fd60f76fa58e39db1b
Sylvain Lesage
2022-11-16T09:53:25
feat: 🎸 update dependencies to fix vulnerabilities (#639)
diff --git a/e2e/poetry.lock b/e2e/poetry.lock index 9b087d3e..3f42b0ad 100644 --- a/e2e/poetry.lock +++ b/e2e/poetry.lock @@ -1,8 +0,0 @@ -[[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -117,0 +110,11 @@ conda = ["pyyaml"] +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -362,8 +364,0 @@ tomlkit = ">=0.7.2,<0.8.0" -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -407 +402 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -411 +406 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -414 +408,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -416,0 +411 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -420,2 +415 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -424 +418 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -525 +519 @@ description = "A lil' TOML parser" -category = "dev" +category = "main" @@ -586 +580 @@ python-versions = "3.9.6" -content-hash = "323da1fd11fc2760d0f1390619427a2e1afc578232ad2074c72578ce13291f5f" +content-hash = "c70d795e1d1e5f16294299e275c95212b13269a7a985b515448b6872c70098c7" @@ -589,4 +582,0 @@ content-hash = "323da1fd11fc2760d0f1390619427a2e1afc578232ad2074c72578ce13291f5f -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] @@ -642,0 +633 @@ dparse = [] +exceptiongroup = [] @@ -731,4 +721,0 @@ poetryup = [ -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -770,4 +757 @@ pyrsistent = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/e2e/pyproject.toml b/e2e/pyproject.toml index 368b7352..bbee6b98 100644 --- a/e2e/pyproject.toml +++ b/e2e/pyproject.toml @@ -10 +10 @@ openapi-spec-validator = "^0.4.0" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock index 986ac3ff..d3360a0a 100644 --- a/jobs/mongodb_migration/poetry.lock +++ b/jobs/mongodb_migration/poetry.lock @@ -9,8 +8,0 @@ python-versions = "*" -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file 
writes." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -166,0 +159,11 @@ tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -436,8 +438,0 @@ test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -494 +489 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -498 +493 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -501 +495,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -503,0 +498 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -507,2 +502 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -511 +505 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -683 +677 @@ python-versions = "3.9.6" -content-hash = "037d3a8342a3cc4f2add1dcb12f50eeee5c3d0a62a733fb3472c17fe258fff1b" +content-hash = "1406e01f36b3a97a1ac5a6e5ede76e0a63583893e59435680b678d629e8ffb46" @@ -690 +683,0 @@ appdirs = [ -atomicwrites = [] @@ -710,0 +704 @@ environs = [] +exceptiongroup = [] @@ -793,4 +786,0 @@ psutil = [] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -810,4 +800 @@ pyparsing = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml index dbeffbdf..b4670064 100644 --- a/jobs/mongodb_migration/pyproject.toml +++ b/jobs/mongodb_migration/pyproject.toml @@ -22 +22 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/libs/libcache/poetry.lock b/libs/libcache/poetry.lock index c377113a..a7c526ac 100644 --- a/libs/libcache/poetry.lock +++ b/libs/libcache/poetry.lock @@ -9,8 +8,0 @@ python-versions = "*" -[[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -166,0 +159,11 @@ tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -362,8 +364,0 @@ tomlkit = ">=0.7.2,<0.8.0" -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -420 +415 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -424 +419 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -427 +421,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -429,0 +424 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -433,2 +428 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -437 +431 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -609 +603 @@ python-versions = "3.9.6" -content-hash = "56acd504dbc619f334ffaff80146c1e7564234fc8d903a6e2c93f2a0ec3db22a" +content-hash = "886e7d495d7668a380a144719cbbb13dee94071c1a0821e5f01db651570ad94e" @@ -616,4 +609,0 @@ appdirs = [ -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] @@ -717,0 +708 @@ environs = [] +exceptiongroup = [] @@ -807,4 +797,0 @@ poetryup = [ -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -932,4 +919 @@ pyparsing = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml index 43f617c2..ed1dfe0b 100644 --- a/libs/libcache/pyproject.toml +++ b/libs/libcache/pyproject.toml @@ -23 +23 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/libs/libcommon/poetry.lock b/libs/libcommon/poetry.lock index 80ad9fc4..a118555a 100644 --- a/libs/libcommon/poetry.lock +++ b/libs/libcommon/poetry.lock @@ -1,8 +0,0 @@ -[[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -146,0 +139,11 @@ tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -331,8 +333,0 @@ tomlkit = ">=0.7.2,<0.8.0" -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -368 +363 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -372 +367 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -375 +369,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -377,0 +372 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -381,2 +376 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -385 +379 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -557 +551 @@ python-versions = "3.9.6" -content-hash = "7b89504d8d03a71e3927157f76aeb95ca9ed3ff00bea023000b18ac70f679c13" +content-hash = "9fdb339c610f256719640f9c28eb4b2e997e0c0e95bd4d4faf0d456c3fe5fdb7" @@ -560,4 +553,0 @@ content-hash = "7b89504d8d03a71e3927157f76aeb95ca9ed3ff00bea023000b18ac70f679c13 -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] @@ -657,0 +648 @@ environs = [] +exceptiongroup = [] @@ -774,4 +764,0 @@ poetryup = [ -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -790,4 +777 @@ pyparsing = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 00279f56..8b2009f6 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -20 +20 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/libs/libqueue/poetry.lock b/libs/libqueue/poetry.lock index 007dc1d4..d197a762 100644 --- a/libs/libqueue/poetry.lock +++ b/libs/libqueue/poetry.lock @@ -1,8 +0,0 @@ -[[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -158,0 +151,11 @@ tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -365,8 +367,0 @@ test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -423 +418 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -427 +422 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -430 +424,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -432,0 +427 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -436,2 +431 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -440 +434 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -620 +614 @@ python-versions = "3.9.6" -content-hash = "6fd9055ec2fc0f191dd3fb2848cc44b721eddd0caccb377ddc6a73274a06a214" +content-hash = "6e790bfb9d260fa2d0aa4038de4f3f743b9a3d07d0da044c7f7d0ad3367262c8" @@ -623,4 +616,0 @@ content-hash = "6fd9055ec2fc0f191dd3fb2848cc44b721eddd0caccb377ddc6a73274a06a214 -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] @@ -724,0 +715 @@ environs = [] +exceptiongroup = [] @@ -815,4 +805,0 @@ psutil = [] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -940,4 +927 @@ pyparsing = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index 8908d83a..ebd57ae5 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -24 +24 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 037508d9..bd4bcde5 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -37,8 +36,0 @@ tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -194,0 +187,11 @@ tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -520,8 +522,0 @@ test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -578 +573 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -582 +577 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -585 +579,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -587,0 +582 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -591,2 +586 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -595 +589 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -863 +857 @@ python-versions = "3.9.6" -content-hash = "e3da52de7c61f34cce97d1fc7975a6a757547cb6b6932a016b4b261a49befb2d" +content-hash = "6de5688374b0debaceff7658a2a4ec3f6cf0b780aba68a68a0034cc3250725ef" @@ -878 +871,0 @@ asgiref = [ -atomicwrites = [] @@ -901,0 +895 @@ environs = [] +exceptiongroup = [] @@ -994,4 +987,0 @@ psutil = [] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -1119,4 +1109 @@ pyparsing = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 608827de..843fe201 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -27 +27 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 6f95b2b5..182d1920 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -37,8 +36,0 @@ tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -194,0 +187,11 @@ tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -544,8 +546,0 @@ test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -610 +605 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -614 +609 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -617 +611,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -619,0 +614 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -623,2 +618 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -627 +621 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -905 +899 @@ python-versions = "3.9.6" -content-hash = "1231a8dff4cb257231da7de69a8b0ec09597a935dc8edf207470b39c3f590adb" +content-hash = "5c145b2208384d3c4c41fcf1e88adc2c189c7bc16b5622427c6fc87fa81169aa" @@ -917 +910,0 @@ asgiref = [ -atomicwrites = [] @@ -940,0 +934 @@ environs = [] +exceptiongroup = [] @@ -1035,4 +1028,0 @@ psutil = [] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -1183,4 +1173 @@ pyrsistent = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 892564e1..08bd47c5 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -28 +28 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 26b06c36..aad63417 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -105,8 +104,0 @@ python-versions = ">=3.6" -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -449,0 +442,11 @@ python-versions = ">=3.6" +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -507 +510 @@ name = "fsspec" -version = "2022.10.0" +version = "2022.11.0" @@ -1229 +1232 @@ name = "pillow" -version = "9.2.0" +version = "9.3.0" @@ -1325,8 +1327,0 @@ test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -1515 +1510 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -1519 +1514 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -1522 +1516,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -1524,0 +1519 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -1528,2 +1523 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -1532 +1526 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -2294 +2288 @@ python-versions = "3.9.6" -content-hash = "d4b3c0e736a68bc46a6e39d50054af6c5d5424952ec118fb9097f6f89aae60f8" +content-hash = "dddb081818eeb960f20dcdfeb38b8b7ab5c5d5c466e07e6ff3bce681357f661b" @@ -2316 +2309,0 @@ async-timeout = [ -atomicwrites = [] @@ -2543,0 +2537 @@ et-xmlfile = [ +exceptiongroup = [] @@ -2820,4 +2813,0 @@ psutil = [] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -3034,4 +3024 @@ pysocks = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 94d4435a..bdc6abfa 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -9 +9 @@ license = "Apache-2.0" -Pillow = "^9.0.0" +Pillow = "^9.3.0" @@ -49 +49 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0" diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index d8327665..aad63417 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -105,8 +104,0 @@ python-versions = ">=3.6" -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - @@ -359 +351 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -365 +357 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", 
"librosa"] @@ -449,0 +442,11 @@ python-versions = ">=3.6" +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + @@ -507 +510 @@ name = "fsspec" -version = "2022.10.0" +version = "2022.11.0" @@ -1229 +1232 @@ name = "pillow" -version = "9.2.0" +version = "9.3.0" @@ -1325,8 +1327,0 @@ test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - @@ -1515 +1510 @@ name = "pytest" -version = "6.2.5" +version = "7.2.0" @@ -1519 +1514 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -1522 +1516,0 @@ python-versions = ">=3.6" -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -1524,0 +1519 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} @@ -1528,2 +1523 @@ pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} @@ -1532 +1526 @@ toml = "*" -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -2294 +2288 @@ python-versions = "3.9.6" -content-hash = "d4b3c0e736a68bc46a6e39d50054af6c5d5424952ec118fb9097f6f89aae60f8" +content-hash = "dddb081818eeb960f20dcdfeb38b8b7ab5c5d5c466e07e6ff3bce681357f661b" @@ -2316 +2309,0 @@ async-timeout = [ -atomicwrites = [] @@ -2543,0 +2537 @@ et-xmlfile = [ +exceptiongroup = [] @@ -2820,4 +2813,0 @@ psutil = [] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] @@ -3034,4 +3024 @@ pysocks = [ -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] +pytest = [] diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 0546794e..b608dd18 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -9 +9 @@ license = "Apache-2.0" -Pillow = "^9.0.0" +Pillow = "^9.3.0" @@ -49 +49 @@ poetryup = "^0.3.8" -pytest = "^6.2.5" +pytest = "^7.2.0"
035818f64bfaad897cae95d4e0c7335739246b1b
Sylvain Lesage
2022-11-16T08:58:46
fix: 🐛 fix the truncation (#638)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 205d5849..eaf106a8 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -13 +13 @@ - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-a9824d5" + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-450cc91" diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 515750e2..8d8fc05e 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -169,0 +170,3 @@ firstRows: + # Max size of the /first-rows endpoint response in bytes + maxBytes: "200_000" + diff --git a/libs/libcommon/src/libcommon/utils.py b/libs/libcommon/src/libcommon/utils.py index 2005fcb5..a03b6138 100644 --- a/libs/libcommon/src/libcommon/utils.py +++ b/libs/libcommon/src/libcommon/utils.py @@ -12,0 +13,3 @@ def orjson_default(obj: Any) -> Any: + # see https://stackoverflow.com/a/40000564/7351594 for example + # the bytes are encoded with base64, and then decoded as utf-8 + # (ascii only, by the way) to get a string diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 1cadae71..94d4435a 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -59 +59 @@ requires = ["poetry-core>=1.0.0"] -addopts = "-k 'not deprecated'" +# addopts = "-k 'wip'" @@ -62,2 +61,0 @@ markers = [ - "deprecated: tests on deprecated code (deselect with '-m \"not deprecated\"')", - "real_dataset: tests on real datasets (from the Hub)", diff --git a/workers/first_rows/src/first_rows/response.py b/workers/first_rows/src/first_rows/response.py index 25c9983c..9776355d 100644 --- a/workers/first_rows/src/first_rows/response.py +++ b/workers/first_rows/src/first_rows/response.py @@ -6 +5,0 @@ import logging -import sys @@ -95,8 +94,16 @@ def get_rows( -def get_size_in_bytes(obj: Any): - return sys.getsizeof(orjson_dumps(obj)) - # ^^ every row is transformed here in a string, because it corresponds to - # the size the row will contribute in the JSON response to /first-rows endpoint. - # The size of the string is measured in bytes. 
- # An alternative would have been to look at the memory consumption (pympler) but it's - # less related to what matters here (size of the JSON, number of characters in the - # dataset viewer table on the hub) +def get_json_size(obj: Any) -> int: + """Returns the size of an object in bytes once serialized as JSON + + Args: + obj (Any): the Python object + + Returns: + int: the size of the serialized object in bytes + """ + return len(orjson_dumps(obj)) + + +# from https://stackoverflow.com/a/43848928/7351594 +def utf8_lead_byte(b: int) -> bool: + """A UTF-8 intermediate byte starts with the bits 10xxxxxx.""" + return (b & 0xC0) != 0x80 @@ -105,2 +112,10 @@ def get_size_in_bytes(obj: Any): -def truncate_cell(cell: Any, min_cell_bytes: int) -> str: - return orjson_dumps(cell)[:min_cell_bytes].decode("utf8", "ignore") +def utf8_byte_truncate(text: str, max_bytes: int) -> str: + """If text[max_bytes] is not a lead byte, back up until a lead byte is + found and truncate before that character.""" + utf8 = text.encode("utf8") + if len(utf8) <= max_bytes: + return text + i = max_bytes + while i > 0 and not utf8_lead_byte(utf8[i]): + i -= 1 + return utf8[:i].decode("utf8", "ignore") @@ -113,6 +128,5 @@ def truncate_row_item(row_item: RowItem, min_cell_bytes: int) -> RowItem: - # for now: all the cells, but the smallest ones, are truncated - cell_bytes = get_size_in_bytes(cell) - if cell_bytes > min_cell_bytes: - row_item["truncated_cells"].append(column_name) - row[column_name] = truncate_cell(cell=cell, min_cell_bytes=min_cell_bytes) - else: + # for now: all the cells above min_cell_bytes are truncated to min_cell_bytes + # it's done by replacing the cell (which can have any type) by a string with + # its JSON serialization, and then truncating it to min_cell_bytes + cell_json = orjson_dumps(cell) + if len(cell_json) <= min_cell_bytes: @@ -119,0 +134,4 @@ def truncate_row_item(row_item: RowItem, min_cell_bytes: int) -> RowItem: + else: + cell_json_str = cell_json.decode("utf8", "ignore") + row_item["truncated_cells"].append(column_name) + row[column_name] = utf8_byte_truncate(text=cell_json_str, max_bytes=min_cell_bytes) @@ -123,0 +142,3 @@ def truncate_row_item(row_item: RowItem, min_cell_bytes: int) -> RowItem: +COMMA_SIZE = 1 # the comma "," is encoded with one byte in utf-8 + + @@ -127 +148 @@ def truncate_row_items(row_items: List[RowItem], min_cell_bytes: int, rows_max_b - rows_bytes = sum(get_size_in_bytes(row_item) for row_item in row_items) + rows_bytes = sum(get_json_size(row_item) for row_item in row_items) + COMMA_SIZE * (len(row_items) - 1) @@ -133 +154 @@ def truncate_row_items(row_items: List[RowItem], min_cell_bytes: int, rows_max_b - previous_size = get_size_in_bytes(row_item) + previous_size = get_json_size(row_item) + COMMA_SIZE @@ -135 +156 @@ def truncate_row_items(row_items: List[RowItem], min_cell_bytes: int, rows_max_b - new_size = get_size_in_bytes(row_item) + new_size = get_json_size(row_item) + COMMA_SIZE @@ -164 +185,2 @@ def create_truncated_row_items( - # - at most rows_max_bytes bytes + # - at most rows_max_bytes bytes. Note that it's the limit to the sum of the rows sizes. The JSON response size + # will be greater, due to the other fields (row_idx, truncated_cells, features, etc.). 
@@ -169 +191 @@ def create_truncated_row_items( - rows_bytes += get_size_in_bytes(row_item) + rows_bytes += get_json_size(row_item) + COMMA_SIZE @@ -173,0 +196,3 @@ def create_truncated_row_items( + # caveat: the truncation might not be enough to get under the threshold if: + # - the number of columns is too high + # - rows_max_bytes is too low (or even negative) @@ -185 +210 @@ def create_truncated_row_items( - rows_bytes += get_size_in_bytes(row_item) + rows_bytes += get_json_size(row_item) + COMMA_SIZE @@ -450,0 +476,10 @@ def compute_first_rows_response( + # get the size of the surrounding JSON (without the rows) + features_list = to_features_list(dataset=dataset, config=config, split=split, features=features) + response: FirstRowsResponse = { + "dataset": dataset, + "config": config, + "split": split, + "features": features_list, + "rows": [], + } + surrounding_json_size = get_json_size(response) @@ -458 +493 @@ def compute_first_rows_response( - rows_max_bytes=rows_max_bytes, + rows_max_bytes=rows_max_bytes - surrounding_json_size, @@ -460,0 +496 @@ def compute_first_rows_response( + response["rows"] = row_items @@ -463,7 +499 @@ def compute_first_rows_response( - "first_rows_response": { - "dataset": dataset, - "config": config, - "split": split, - "features": to_features_list(dataset, config, split, features), - "rows": row_items, - }, + "first_rows_response": response, diff --git a/workers/first_rows/tests/fixtures/datasets.py b/workers/first_rows/tests/fixtures/datasets.py index 1a07dd2d..c1bc51f9 100644 --- a/workers/first_rows/tests/fixtures/datasets.py +++ b/workers/first_rows/tests/fixtures/datasets.py @@ -129,0 +130 @@ def datasets() -> Dict[str, Dataset]: + "big": Dataset.from_pandas(pd.DataFrame({"col": ["a" * 1_234 for _ in range(4_567)]}, dtype=pd.StringDtype())), diff --git a/workers/first_rows/tests/fixtures/hub.py b/workers/first_rows/tests/fixtures/hub.py index a2562d01..fd745c68 100644 --- a/workers/first_rows/tests/fixtures/hub.py +++ b/workers/first_rows/tests/fixtures/hub.py @@ -247,0 +248,8 @@ def hub_public_images_list(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dat [email protected](scope="session", autouse=True) +def hub_public_big(hf_api: HfApi, hf_token: str, datasets: Dict[str, Dataset]) -> Iterable[str]: + repo_id = create_hub_dataset_repo(hf_api=hf_api, hf_token=hf_token, prefix="big", dataset=datasets["big"]) + yield repo_id + with suppress(requests.exceptions.HTTPError, ValueError): + hf_api.delete_repo(repo_id=repo_id, token=hf_token, repo_type="dataset") + + @@ -390,0 +399,7 @@ def get_IMAGES_LIST_rows(dataset: str): +BIG_cols = { + "col": [{"_type": "Value", "dtype": "string"}], +} + +BIG_rows = ["a" * 1_234 for _ in range(4_567)] + + @@ -400,0 +416 @@ def hub_datasets( + hub_public_big, @@ -453,0 +470,5 @@ def hub_datasets( + "big": { + "name": hub_public_big, + "splits_response": create_splits_response(hub_public_big, 0, 1), + "first_rows_response": create_first_rows_response(hub_public_big, BIG_cols, BIG_rows), + }, diff --git a/workers/first_rows/tests/test_response.py b/workers/first_rows/tests/test_response.py index 0e9d3519..639d3718 100644 --- a/workers/first_rows/tests/test_response.py +++ b/workers/first_rows/tests/test_response.py @@ -9 +9 @@ from first_rows.config import WorkerConfig -from first_rows.response import compute_first_rows_response +from first_rows.response import compute_first_rows_response, get_json_size @@ -94,0 +95,38 @@ def test_number_rows( + + [email protected]( + "name,rows_max_bytes,successful_truncation", + [ 
+ # not-truncated public response is 687 bytes + ("public", 10, False), # too small limit, even with truncation + ("public", 1_000, True), # not truncated + # not-truncated big response is 5_885_989 bytes + ("big", 10, False), # too small limit, even with truncation + ("big", 1_000, True), # truncated successfully + ("big", 10_000_000, True), # not truncated + ], +) +def test_truncation( + hub_datasets: HubDatasets, + worker_config: WorkerConfig, + name: str, + rows_max_bytes: int, + successful_truncation: bool, +) -> None: + dataset, config, split = get_default_config_split(hub_datasets[name]["name"]) + response = compute_first_rows_response( + dataset=dataset, + config=config, + split=split, + assets_base_url=worker_config.common.assets_base_url, + hf_endpoint=worker_config.common.hf_endpoint, + hf_token=None, + max_size_fallback=worker_config.first_rows.fallback_max_dataset_size, + rows_max_number=1_000_000, + rows_min_number=10, + rows_max_bytes=rows_max_bytes, + min_cell_bytes=10, + assets_directory=worker_config.cache.assets_directory, + )["first_rows_response"] + print(get_json_size(response)) + assert (get_json_size(response) <= rows_max_bytes) is successful_truncation
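A note on the technique in the commit above: truncating a JSON-serialized cell by naively slicing its bytes can cut a multi-byte UTF-8 character in half, so the fix backs up to the nearest lead byte before cutting. Below is a self-contained sketch; the two helpers mirror the ones added to `workers/first_rows/src/first_rows/response.py` in this diff, while the demo string and assertions are illustrative only.

```python
def utf8_lead_byte(b: int) -> bool:
    """A UTF-8 intermediate byte starts with the bits 10xxxxxx."""
    return (b & 0xC0) != 0x80


def utf8_byte_truncate(text: str, max_bytes: int) -> str:
    """If text[max_bytes] is not a lead byte, back up until a lead byte is
    found and truncate before that character."""
    utf8 = text.encode("utf8")
    if len(utf8) <= max_bytes:
        return text
    i = max_bytes
    while i > 0 and not utf8_lead_byte(utf8[i]):
        i -= 1
    return utf8[:i].decode("utf8", "ignore")


text = "abécédaire"  # "é" takes two bytes in UTF-8 (0xC3 0xA9)
try:
    text.encode("utf8")[:3].decode("utf8")  # naive slice splits "é" in half
except UnicodeDecodeError:
    print("naive byte slicing cut through a multi-byte character")
assert utf8_byte_truncate(text, 3) == "ab"   # backs up to before "é"
assert utf8_byte_truncate(text, 4) == "abé"  # the whole character fits
```

The commit also stops measuring rows with `sys.getsizeof` and budgets for JSON overhead instead: each row contributes `get_json_size(row_item) + COMMA_SIZE` bytes, and `rows_max_bytes` is reduced by the size of the surrounding response (dataset, config, split, features) before the rows are truncated.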
0e28690d1160feea92b61b2280f8479bf22dfa7c
Sylvain Lesage
2022-11-15T14:11:15
Add migration job (#636)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index e9a96944..e0eea42e 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -56,2 +56,2 @@ jobs: - IMAGE_ADMIN: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.admin}}" - IMAGE_API: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.api}}" + IMAGE_SERVICE_ADMIN: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.admin}}" + IMAGE_SERVICE_API: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.api}}" diff --git a/.github/workflows/_unit-tests-python.yml b/.github/workflows/_unit-tests-python.yml index fe8757c7..63d6831f 100644 --- a/.github/workflows/_unit-tests-python.yml +++ b/.github/workflows/_unit-tests-python.yml @@ -14,3 +13,0 @@ on: - secrets: - codecov-token: - required: true diff --git a/.github/workflows/j-migration-build-docker.yml b/.github/workflows/j-migration-build-docker.yml new file mode 100644 index 00000000..bf660447 --- /dev/null +++ b/.github/workflows/j-migration-build-docker.yml @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +name: jobs/mongodb_migration +on: + workflow_dispatch: + push: + paths: + - 'jobs/mongodb_migration/Dockerfile' + - 'jobs/mongodb_migration/src/**' + - 'jobs/mongodb_migration/poetry.lock' + - 'jobs/mongodb_migration/pyproject.toml' + - '.github/workflows/j-mongodb_migration-build-docker.yml' + - '.github/workflows/_build_push_docker_hub.yml' +jobs: + docker: + uses: ./.github/workflows/_build_push_docker_hub.yml + with: + directory: jobs + project: mongodb_migration + secrets: + dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }} + dockerhub-password: ${{ secrets.DOCKERHUB_PASSWORD }} diff --git a/.github/workflows/j-migration.yml b/.github/workflows/j-migration.yml new file mode 100644 index 00000000..df8f5f6f --- /dev/null +++ b/.github/workflows/j-migration.yml @@ -0,0 +1,22 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +name: jobs/mongodb_migration +on: + workflow_dispatch: + push: + paths: + - 'jobs/mongodb_migration/**' + - '.github/workflows/j-mongodb_migration.yml' + - '.github/workflows/_quality-python.yml' + - '.github/workflows/_unit-tests-python.yml' + - 'tools/docker-compose-mongo.yml' +jobs: + quality: + uses: ./.github/workflows/_quality-python.yml + with: + working-directory: jobs/mongodb_migration + unit-tests: + uses: ./.github/workflows/_unit-tests-python.yml + with: + working-directory: jobs/mongodb_migration diff --git a/.github/workflows/l-libcache.yml b/.github/workflows/l-libcache.yml index 5981ca8c..592303de 100644 --- a/.github/workflows/l-libcache.yml +++ b/.github/workflows/l-libcache.yml @@ -13 +12,0 @@ on: - - 'tools/Python.mk' @@ -24,2 +22,0 @@ jobs: - secrets: - codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/l-libcommon.yml b/.github/workflows/l-libcommon.yml index 2a4d54a8..f8f15511 100644 --- a/.github/workflows/l-libcommon.yml +++ b/.github/workflows/l-libcommon.yml @@ -13 +12,0 @@ on: - - 'tools/Python.mk' @@ -24,2 +22,0 @@ jobs: - secrets: - codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/l-libqueue.yml b/.github/workflows/l-libqueue.yml index cfb67194..5477472e 100644 --- a/.github/workflows/l-libqueue.yml +++ b/.github/workflows/l-libqueue.yml @@ -13 +12,0 @@ on: - - 'tools/Python.mk' @@ -24,2 +22,0 @@ jobs: - secrets: - codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/s-admin.yml b/.github/workflows/s-admin.yml index ddc57a5d..b8d73835 100644 --- a/.github/workflows/s-admin.yml +++ b/.github/workflows/s-admin.yml @@ -13 +12,0 @@ on: - - 'tools/Python.mk' @@ -24,2 +22,0 @@ jobs: - secrets: - codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/s-api.yml b/.github/workflows/s-api.yml index 36f6719b..30eccd73 100644 --- a/.github/workflows/s-api.yml +++ b/.github/workflows/s-api.yml @@ -13 +12,0 @@ on: - - 'tools/Python.mk' @@ -24,2 +22,0 @@ jobs: - secrets: - codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/w-first_rows-build-docker.yml b/.github/workflows/w-first_rows-build-docker.yml index 07b53c18..41625975 100644 --- a/.github/workflows/w-first_rows-build-docker.yml +++ b/.github/workflows/w-first_rows-build-docker.yml @@ -14,0 +15 @@ on: + - 'vendors/' diff --git a/.github/workflows/w-first_rows.yml b/.github/workflows/w-first_rows.yml index dc735a49..cc0d7c46 100644 --- a/.github/workflows/w-first_rows.yml +++ b/.github/workflows/w-first_rows.yml @@ -13 +12,0 @@ on: - - 'tools/Python.mk' @@ -28,2 +26,0 @@ jobs: - secrets: - codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/w-splits-build-docker.yml b/.github/workflows/w-splits-build-docker.yml index 68e091f9..5a72a6dd 100644 --- a/.github/workflows/w-splits-build-docker.yml +++ b/.github/workflows/w-splits-build-docker.yml @@ -14,0 +15 @@ on: + - 'vendors/' diff --git a/.github/workflows/w-splits.yml b/.github/workflows/w-splits.yml index 83a10c37..0b6d6322 100644 --- a/.github/workflows/w-splits.yml +++ b/.github/workflows/w-splits.yml @@ -13 +12,0 @@ on: - - 'tools/Python.mk' @@ -28,2 +26,0 @@ jobs: - secrets: - codecov-token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.vscode/monorepo.code-workspace b/.vscode/monorepo.code-workspace index 5c788f47..6a144da6 100644 --- a/.vscode/monorepo.code-workspace +++ b/.vscode/monorepo.code-workspace @@ -10,0 +11,4 @@ + { + "name": "jobs/mongodb_migration", + "path": "../jobs/mongodb_migration" + }, @@ -46,0 +51 @@ + 
"jobs": true, diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 9d7ff154..205d5849 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -3,0 +4,3 @@ + "jobs": { + "mongodbMigration": "huggingface/datasets-server-jobs-mongodb_migration:sha-a514e54" + }, diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index 81380208..db143712 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -26,0 +27,3 @@ common: +# --- jobs (pre-install/upgrade hooks) --- + + diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 267fe7a8..515750e2 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -68,0 +69,6 @@ common: +# --- jobs (pre-install/upgrade hooks) --- + +mongodbMigration: + nodeSelector: + role-datasets-server: "true" + diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index 5c48b155..4e20e03f 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -52,0 +53,5 @@ app: "{{ .Release.Name }}-reverse-proxy" +{{- define "labels.mongodbMigration" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-mongodb-migration" +{{- end -}} + diff --git a/chart/templates/jobs/mongodb-migration/_container.tpl b/chart/templates/jobs/mongodb-migration/_container.tpl new file mode 100644 index 00000000..09b87d31 --- /dev/null +++ b/chart/templates/jobs/mongodb-migration/_container.tpl @@ -0,0 +1,69 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerMongodbMigration" -}} +- name: "{{ include "name" . }}-mongodb-migration" + image: {{ .Values.dockerImage.jobs.mongodbMigration }} + imagePullPolicy: IfNotPresent + env: + - name: CACHE_ASSETS_DIRECTORY + value: {{ .Values.cache.assetsDirectory | quote }} + - name: CACHE_MONGO_DATABASE + value: {{ .Values.cache.mongoDatabase | quote }} + - name: CACHE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL + optional: false + {{- end }} + - name: QUEUE_MONGO_DATABASE + value: {{ .Values.queue.mongoDatabase | quote }} + - name: QUEUE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL + optional: false + {{- end }} + - name: COMMON_ASSETS_BASE_URL + value: "{{ include "assets.baseUrl" . }}" + - name: COMMON_HF_ENDPOINT + value: {{ .Values.common.hfEndpoint | quote }} + - name: COMMON_HF_TOKEN + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.hfToken | quote }} + key: HF_TOKEN + optional: false + - name: COMMON_LOG_LEVEL + value: {{ .Values.common.logLevel | quote }} + - name: MONGODB_MIGRATION_MONGO_DATABASE + value: {{ .Values.mongodbMigration.mongoDatabase | quote }} + - name: MONGODB_MIGRATION_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL + optional: false + {{- end }} + volumeMounts: + - mountPath: {{ .Values.cache.assetsDirectory | quote }} + mountPropagation: None + name: nfs + subPath: "{{ include "assets.subpath" . 
}}" + readOnly: false + securityContext: + allowPrivilegeEscalation: false + resources: + {{ toYaml .Values.mongodbMigration.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/jobs/mongodb-migration/job.yaml b/chart/templates/jobs/mongodb-migration/job.yaml new file mode 100644 index 00000000..2ee5a5f3 --- /dev/null +++ b/chart/templates/jobs/mongodb-migration/job.yaml @@ -0,0 +1,39 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +apiVersion: batch/v1 +kind: Job +metadata: + labels: + {{ include "labels.mongodbMigration" . | nindent 4 }} + name: "{{ include "release" . }}-job-mongodb-migration" + namespace: {{ .Release.Namespace }} + annotations: + "helm.sh/hook": pre-install,pre-upgrade + "helm.sh/hook-weight": "-1" + "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded +spec: + template: + metadata: + labels: + {{ include "labels.mongodbMigration" . | nindent 8 }} + spec: + restartPolicy: Never + initContainers: + {{ include "initContainerAssets" . | nindent 8 }} + containers: + {{ include "containerMongodbMigration" . | nindent 8 }} + nodeSelector: + {{ toYaml .Values.mongodbMigration.nodeSelector | nindent 8 }} + tolerations: + {{ toYaml .Values.mongodbMigration.tolerations | nindent 8 }} + volumes: + - name: nfs + nfs: + server: {{ .Values.storage.nfs.server }} + path: {{ .Values.storage.nfs.path }} + securityContext: + runAsUser: {{ .Values.uid }} + runAsGroup: {{ .Values.gid }} + runAsNonRoot: true + backoffLimit: 0 diff --git a/chart/values.yaml b/chart/values.yaml index c1bc92ea..4d38807c 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -28,0 +29,2 @@ dockerImage: + jobs: + mongodb_migration: "" @@ -68,0 +71,14 @@ numbaCacheDirectory: "/numba-cache" +# --- jobs (pre-install/upgrade hooks) --- + +mongodbMigration: + # Name of the mongo db database used for storing the migrations history + mongoDatabase: "datasets_server_maintenance" + + nodeSelector: {} + resources: + requests: + cpu: 1 + limits: + cpu: 1 + tolerations: [] + diff --git a/jobs/mongodb_migration/.flake8 b/jobs/mongodb_migration/.flake8 new file mode 100644 index 00000000..f7d6157c --- /dev/null +++ b/jobs/mongodb_migration/.flake8 @@ -0,0 +1,5 @@ +[flake8] +# Recommend matching the black line length (119), +# rather than using the flake8 default of 79: +max-line-length = 119 +extend-ignore = "E203" diff --git a/jobs/mongodb_migration/.python-version b/jobs/mongodb_migration/.python-version new file mode 100644 index 00000000..1635d0f5 --- /dev/null +++ b/jobs/mongodb_migration/.python-version @@ -0,0 +1 @@ +3.9.6 diff --git a/jobs/mongodb_migration/Dockerfile b/jobs/mongodb_migration/Dockerfile new file mode 100644 index 00000000..7bfecb75 --- /dev/null +++ b/jobs/mongodb_migration/Dockerfile @@ -0,0 +1,33 @@ +# build with +# docker build -t some_tag_admin -f Dockerfile ../.. 
+FROM python:3.9.6-slim
+
+ENV PYTHONFAULTHANDLER=1 \
+    PYTHONUNBUFFERED=1 \
+    PYTHONHASHSEED=random \
+    PIP_NO_CACHE_DIR=off \
+    PIP_DISABLE_PIP_VERSION_CHECK=on \
+    PIP_DEFAULT_TIMEOUT=100 \
+    POETRY_NO_INTERACTION=1 \
+    # Versions:
+    POETRY_VERSION=1.1.12 \
+    POETRY_VIRTUALENVS_IN_PROJECT=true
+
+# System deps:
+RUN apt-get update \
+    && apt-get install -y build-essential python3-dev make \
+    && rm -rf /var/lib/apt/lists/*
+RUN pip install -U --no-cache-dir pip
+RUN pip install "poetry==$POETRY_VERSION"
+
+WORKDIR /src
+COPY libs/libcache/dist ./libs/libcache/dist
+COPY libs/libqueue/dist ./libs/libqueue/dist
+COPY libs/libcommon/dist ./libs/libcommon/dist
+COPY jobs/mongodb_migration/src ./jobs/mongodb_migration/src
+COPY jobs/mongodb_migration/poetry.lock ./jobs/mongodb_migration/poetry.lock
+COPY jobs/mongodb_migration/pyproject.toml ./jobs/mongodb_migration/pyproject.toml
+WORKDIR /src/jobs/mongodb_migration/
+RUN poetry install
+
+ENTRYPOINT ["poetry", "run", "python", "src/mongodb_migration/main.py"]
diff --git a/jobs/mongodb_migration/Makefile b/jobs/mongodb_migration/Makefile
new file mode 100644
index 00000000..250ab38f
--- /dev/null
+++ b/jobs/mongodb_migration/Makefile
@@ -0,0 +1,16 @@
+# environment variables for the commands (docker-compose, poetry)
+export COMPOSE_PROJECT_NAME := mongodb_migration
+export MONGO_PORT := 27060
+export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT}
+export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT}
+export MONGODB_MIGRATION_MONGO_URL := mongodb://localhost:${MONGO_PORT}
+# makefile variables
+DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml
+
+include ../../tools/Python.mk
+include ../../tools/PythonTest.mk
+include ../../tools/Docker.mk
+
+.PHONY: run
+run:
+	poetry run python src/mongodb_migration/main.py
diff --git a/jobs/mongodb_migration/README.md b/jobs/mongodb_migration/README.md
new file mode 100644
index 00000000..e9e1db79
--- /dev/null
+++ b/jobs/mongodb_migration/README.md
@@ -0,0 +1,46 @@
+# Datasets server databases migrations
+
+> Scripts to migrate the datasets server databases
+
+## Configuration
+
+The script can be configured using environment variables. They are grouped by scope.
+
+### Mongodb migration job
+
+Set environment variables to configure the job (`MONGODB_MIGRATION_` prefix):
+
+- `MONGODB_MIGRATION_MONGO_DATABASE`: the name of the database used for storing the migrations history. Defaults to `"datasets_server_maintenance"`.
+- `MONGODB_MIGRATION_MONGO_URL`: the URL used to connect to the MongoDB server. Defaults to `"mongodb://localhost:27017"`.
+
+### Cache
+
+See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration.
+
+### Queue
+
+See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration.
+
+### Common
+
+See [../../libs/libcommon/README.md](../../libs/libcommon/README.md) for more information about the common configuration.
+
+## Script
+
+The script:
+
+- `run`: run all the migrations. It first looks at the previously executed migrations, then runs the new ones, reverting them in case of error.
+ +To launch the scripts: + +- if the image runs in a docker container: + + ```shell + docker exec -it datasets-server_mongodb_migration_1 make <SCRIPT> + ``` + +- if the image runs in a kube pod: + + ```shell + kubectl exec datasets-server-prod-mongodb_migration-5cc8f8fcd7-k7jfc -- make <SCRIPT> + ``` diff --git a/jobs/mongodb_migration/poetry.lock b/jobs/mongodb_migration/poetry.lock new file mode 100644 index 00000000..986ac3ff --- /dev/null +++ b/jobs/mongodb_migration/poetry.lock @@ -0,0 +1,911 @@ +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "22.1.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "bandit" +version = "1.7.4" +description = "Security oriented static analyser for python code." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +stevedore = ">=1.20.0" + +[package.extras] +test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] +toml = ["toml"] +yaml = ["pyyaml"] + +[[package]] +name = "black" +version = "22.10.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2022.9.24" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "charset-normalizer" +version = "2.1.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "dev" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "coverage" +version = "6.5.0" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dnspython" +version = "1.16.0" +description = "DNS toolkit" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +DNSSEC = ["pycryptodome", "ecdsa (>=0.13)"] +IDNA = ["idna (>=2.1)"] + +[[package]] +name = "dparse" +version = "0.6.2" +description = "A parser for Python dependency files" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +packaging = "*" +toml = "*" + +[package.extras] +pipenv = ["pipenv"] +conda = ["pyyaml"] + +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + +[[package]] +name = "flake8" +version = "3.9.2" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.29" +description = "GitPython is a python library used to interact with Git repositories" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." 
+category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + +[[package]] +name = "libcache" +version = "0.3.4" +description = "Library for the cache in mongodb" +category = "main" +optional = false +python-versions = "==3.9.6" + +[package.dependencies] +appdirs = ">=1.4.4,<2.0.0" +environs = ">=9.5.0,<10.0.0" +mongo-types = "0.15.1" +mongoengine = ">=0.24.1,<0.25.0" +pymongo = {version = ">=3.12.3,<4.0.0", extras = ["srv"]} + +[package.source] +type = "file" +url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" + +[[package]] +name = "libcommon" +version = "0.3.2" +description = "Library for utils, common to all the services and workers" +category = "main" +optional = false +python-versions = "==3.9.6" + +[package.dependencies] +environs = ">=9.5.0,<10.0.0" +orjson = ">=3.6.4,<4.0.0" + +[package.source] +type = "file" +url = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl" + +[[package]] +name = "libqueue" +version = "0.4.8" +description = "Library for the jobs queue in mongodb" +category = "main" +optional = false +python-versions = "==3.9.6" + +[package.dependencies] +environs = ">=9.5.0,<10.0.0" +mongo-types = "0.15.1" +mongoengine = ">=0.24.1,<0.25.0" +packaging = ">=21.3,<22.0" +psutil = ">=5.9.2,<6.0.0" +pymongo = {version = ">=3.12.3,<4.0.0", extras = ["srv"]} + +[package.source] +type = "file" +url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" + +[[package]] +name = "marshmallow" +version = "3.19.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mongo-types" +version = "0.15.1" +description = "Type stubs for mongoengine w/ basic support for bson and pymongo" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[[package]] +name = "mongoengine" +version = "0.24.2" +description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pymongo = ">=3.4,<5.0" + +[[package]] +name = "mypy" +version = "0.812" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +mypy-extensions = ">=0.4.3,<0.5.0" +typed-ast = ">=1.4.0,<1.5.0" +typing-extensions = ">=3.7.4" + +[package.extras] +dmypy = ["psutil (>=4.0)"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "orjson" +version = "3.8.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.10.2" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pbr" +version = "5.11.0" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + +[[package]] +name = "platformdirs" +version = "2.5.4" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "poetryup" +version = "0.3.15" +description = "Update dependencies and bump their version in the pyproject.toml file" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +tomlkit = ">=0.7.2,<0.8.0" + +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.7.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyflakes" +version = "2.3.1" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pymongo" +version = "3.13.0" +description = "Python driver for MongoDB <http://www.mongodb.org>" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +dnspython = {version = ">=1.16.0,<1.17.0", optional = true, markers = "extra == \"srv\""} + +[package.extras] +aws = ["pymongo-auth-aws (<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] +gssapi = ["pykerberos"] +ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)", "certifi"] +snappy = ["python-snappy"] +srv = ["dnspython (>=1.16.0,<1.17.0)"] +tls = ["ipaddress"] +zstd = ["zstandard"] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "2.12.1" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +coverage = ">=5.2.1" +pytest = ">=4.6" +toml = "*" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." 
+category = "dev" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruamel.yaml" +version = "0.17.21" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "dev" +optional = false +python-versions = ">=3" + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""} + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel.yaml.clib" +version = "0.2.7" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "safety" +version = "2.3.1" +description = "Checks installed dependencies for known vulnerabilities and licenses." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" + +[package.extras] +github = ["pygithub (>=1.43.3)", "jinja2 (>=3.1.0)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "stevedore" +version = "4.1.1" +description = "Manage dynamic plugins for Python applications" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tomlkit" +version = "0.7.2" +description = "Style preserving TOML library" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "typed-ast" +version = "1.4.3" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.4.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.12" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[metadata] +lock-version = "1.1" +python-versions = "3.9.6" +content-hash = "037d3a8342a3cc4f2add1dcb12f50eeee5c3d0a62a733fb3472c17fe258fff1b" + +[metadata.files] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +atomicwrites = [] +attrs = [] +bandit = [ + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, +] +black = [] +certifi = [] +charset-normalizer = [] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [] +coverage = [] +dnspython = [ + {file = "dnspython-1.16.0-py2.py3-none-any.whl", hash = "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"}, + {file = "dnspython-1.16.0.zip", hash = "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"}, +] +dparse = [] +environs = [] +flake8 = [ + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] +gitdb = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] +gitpython = [] +idna = [] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] +libcache = [ + {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, +] +libcommon = [ + {file = "libcommon-0.3.2-py3-none-any.whl", hash = "sha256:774292c9ea2d9ab50c4bf2a3eb212bfaf924096a4279ecf117b70618702d978d"}, +] +libqueue = [ + {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, +] +marshmallow = [] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +mongo-types = [ + {file = 
"mongo-types-0.15.1.tar.gz", hash = "sha256:0a9deeb7733ea7da5db3711d92e22d93556b522f860bbff82e5df44c53bd06a9"}, + {file = "mongo_types-0.15.1-py3-none-any.whl", hash = "sha256:9417ae5b9a759c09630b5ec7d66904cc333c2d2fcfe75e2760a332ed5e267309"}, +] +mongoengine = [] +mypy = [ + {file = "mypy-0.812-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49"}, + {file = "mypy-0.812-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c"}, + {file = "mypy-0.812-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521"}, + {file = "mypy-0.812-cp35-cp35m-win_amd64.whl", hash = "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb"}, + {file = "mypy-0.812-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a"}, + {file = "mypy-0.812-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c"}, + {file = "mypy-0.812-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6"}, + {file = "mypy-0.812-cp36-cp36m-win_amd64.whl", hash = "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064"}, + {file = "mypy-0.812-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56"}, + {file = "mypy-0.812-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8"}, + {file = "mypy-0.812-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7"}, + {file = "mypy-0.812-cp37-cp37m-win_amd64.whl", hash = "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564"}, + {file = "mypy-0.812-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506"}, + {file = "mypy-0.812-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5"}, + {file = "mypy-0.812-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66"}, + {file = "mypy-0.812-cp38-cp38-win_amd64.whl", hash = "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e"}, + {file = "mypy-0.812-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a"}, + {file = "mypy-0.812-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a"}, + {file = "mypy-0.812-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97"}, + {file = "mypy-0.812-cp39-cp39-win_amd64.whl", hash = "sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df"}, + {file = "mypy-0.812-py3-none-any.whl", hash = "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4"}, + {file = "mypy-0.812.tar.gz", hash = "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = 
"sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +orjson = [] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pathspec = [] +pbr = [] +platformdirs = [] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +poetryup = [ + {file = "poetryup-0.3.15-py3-none-any.whl", hash = "sha256:db068f55d10c0f89c76ea2b62c6bb81c0b0512454f7a83bdc0a13c146e5fb13e"}, + {file = "poetryup-0.3.15.tar.gz", hash = "sha256:efa4e7bb0cd005db4aff3cc678c8bfba9474ef42d5759c0168f2a55fc0f17bc3"}, +] +psutil = [] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] +pymongo = [] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pytest = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] +pytest-cov = [ + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, +] +python-dotenv = [] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = 
"PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +"ruamel.yaml" = [] +"ruamel.yaml.clib" = [] +safety = [] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] +stevedore = [] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tomlkit = [ + {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"}, + {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"}, +] +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = 
"typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] +typing-extensions = [] +urllib3 = [] diff --git a/jobs/mongodb_migration/poetry.toml b/jobs/mongodb_migration/poetry.toml new file mode 100644 index 00000000..ab1033bd --- /dev/null +++ b/jobs/mongodb_migration/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/jobs/mongodb_migration/pyproject.toml b/jobs/mongodb_migration/pyproject.toml new file mode 100644 index 00000000..dbeffbdf --- /dev/null +++ b/jobs/mongodb_migration/pyproject.toml @@ -0,0 +1,48 @@ +[tool.poetry] +authors = ["Sylvain Lesage <[email protected]>"] +description = "MongoDB database migration job" +name = "mongodb_migration" +version = "0.0.1" +license = "Apache-2.0" + +[tool.poetry.dependencies] +environs = "^9.5.0" +libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } +libcommon = { path = 
"../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } +python = "3.9.6" + +[tool.poetry.dev-dependencies] +bandit = "^1.7.0" +black = "^22.1.0" +flake8 = "^3.9.2" +isort = "^5.9.3" +mypy = "0.812" +poetryup = "^0.3.8" +pytest = "^6.2.5" +pytest-cov = "^2.12.1" +safety = "^2.1.1" + +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core>=1.0.0"] + +[tool.pytest.ini_options] +#addopts = "-k 'wip'" +filterwarnings = ["ignore::DeprecationWarning"] +markers = [ + "wip: tests being developed" +] + +[tool.coverage.run] +source = ["mongodb_migration"] + +[tool.isort] +profile = "black" + +[tool.black] +line-length = 119 +preview = true + +[tool.mypy] +strict = true diff --git a/jobs/mongodb_migration/src/mongodb_migration/__init__.py b/jobs/mongodb_migration/src/mongodb_migration/__init__.py new file mode 100644 index 00000000..1e9d0c5a --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. diff --git a/jobs/mongodb_migration/src/mongodb_migration/check.py b/jobs/mongodb_migration/src/mongodb_migration/check.py new file mode 100644 index 00000000..180d0042 --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/check.py @@ -0,0 +1,50 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. +# adapted from https://docs.mongoengine.org/guide/migration.html#post-processing-checks + +from typing import Callable, Iterator, List, Optional, Type, TypeVar + +from mongoengine import Document +from pymongo.collection import Collection + + +# --- some typing subtleties, see https://github.com/sbdchd/mongo-types +class DocumentWithId(Document): + id: str + + +U = TypeVar("U", bound=DocumentWithId) +DocumentClass = Type[U] +CustomValidation = Callable[[U], None] +# --- end + + +def get_random_oids(collection: Collection, sample_size: int) -> List[int]: + pipeline = [{"$project": {"_id": 1}}, {"$sample": {"size": sample_size}}] + return [s["_id"] for s in collection.aggregate(pipeline)] + + +def get_random_documents(DocCls: DocumentClass, sample_size: int) -> Iterator[DocumentWithId]: + doc_collection = DocCls._get_collection() + random_oids = get_random_oids(doc_collection, sample_size) + return DocCls.objects(id__in=random_oids) # type: ignore + + +def check_documents(DocCls: DocumentClass, sample_size: int, custom_validation: Optional[CustomValidation] = None): + for doc in get_random_documents(DocCls, sample_size): + # general validation (types and values) + doc.validate() + + # load all subfields, + # this may trigger additional queries if you have ReferenceFields + # so it may be slow + for field in doc._fields: + try: + getattr(doc, field) + except Exception: + print(f"Could not load field {field} in Document {doc.id}") + raise + + # custom validation + if custom_validation is not None: + custom_validation(doc) diff --git a/jobs/mongodb_migration/src/mongodb_migration/collector.py b/jobs/mongodb_migration/src/mongodb_migration/collector.py new file mode 100644 index 00000000..49463307 --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/collector.py @@ -0,0 +1,15 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from typing import List + +from mongodb_migration.migration import Migration +from mongodb_migration.migrations._20221110230400_example import MigrationExample + + +# TODO: add a way to automatically collect migrations from the migrations/ folder +class MigrationsCollector: + def get_migrations(self) -> List[Migration]: + return [ + MigrationExample(version="20221110230400", description="example"), + ] diff --git a/jobs/mongodb_migration/src/mongodb_migration/config.py b/jobs/mongodb_migration/src/mongodb_migration/config.py new file mode 100644 index 00000000..39c6051d --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/config.py @@ -0,0 +1,38 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from environs import Env +from libcache.config import CacheConfig +from libcommon.config import CommonConfig +from libqueue.config import QueueConfig + +from mongodb_migration.database_migrations import connect_to_database + + +class MongodbMigrationConfig: + mongo_database: str + mongo_url: str + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("MONGODB_MIGRATION_"): + self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_maintenance") + self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") + self.setup() + + def setup(self): + connect_to_database(database=self.mongo_database, host=self.mongo_url) + + +class JobConfig: + cache: CacheConfig + common: CommonConfig + mongodb_migration: MongodbMigrationConfig + queue: QueueConfig + + def __init__(self): + # First process the common configuration to setup the logging + self.common = CommonConfig() + self.cache = CacheConfig() + self.mongodb_migration = MongodbMigrationConfig() + self.queue = QueueConfig() diff --git a/jobs/mongodb_migration/src/mongodb_migration/database_migrations.py b/jobs/mongodb_migration/src/mongodb_migration/database_migrations.py new file mode 100644 index 00000000..c6590d7d --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/database_migrations.py @@ -0,0 +1,62 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import types +from typing import Generic, Type, TypeVar + +from mongoengine import Document, DoesNotExist, connect +from mongoengine.fields import StringField +from mongoengine.queryset.queryset import QuerySet + +# START monkey patching ### hack ### +# see https://github.com/sbdchd/mongo-types#install +U = TypeVar("U", bound=Document) + + +def no_op(self, x): # type: ignore + return self + + +QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet) + + +class QuerySetManager(Generic[U]): + def __get__(self, instance: object, cls: Type[U]) -> QuerySet[U]: + return QuerySet(cls, cls._get_collection()) + + +# END monkey patching ### hack ### + +DATABASE_ALIAS = "maintenance" + + +def connect_to_database(database: str, host: str) -> None: + connect(db=database, alias=DATABASE_ALIAS, host=host) + + +class DatabaseMigration(Document): + """A database migration that has already been executed. 
+
+    Args:
+        version (`str`): The version of the migration, with the format YYYYMMDDHHMMSS
+        description (`str`): A description of the migration
+    """
+
+    meta = {
+        "collection": "databaseMigrations",
+        "db_alias": DATABASE_ALIAS,
+    }
+    version = StringField(required=True)
+    description = StringField(required=True)
+
+    objects = QuerySetManager["DatabaseMigration"]()
+
+
+# only for the tests
+def _clean_maintenance_database() -> None:
+    """Delete all the migrations in the database"""
+    DatabaseMigration.drop_collection()  # type: ignore
+
+
+# explicit re-export
+__all__ = ["DoesNotExist"]
diff --git a/jobs/mongodb_migration/src/mongodb_migration/main.py b/jobs/mongodb_migration/src/mongodb_migration/main.py
new file mode 100644
index 00000000..8773d957
--- /dev/null
+++ b/jobs/mongodb_migration/src/mongodb_migration/main.py
@@ -0,0 +1,18 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 The HuggingFace Authors.
+
+from mongodb_migration.collector import MigrationsCollector
+from mongodb_migration.config import JobConfig
+from mongodb_migration.plan import Plan
+
+if __name__ == "__main__":
+    job_config = JobConfig()
+    collected_migrations = MigrationsCollector().get_migrations()
+    Plan(collected_migrations=collected_migrations).execute()
+
+# See:
+# https://blog.appsignal.com/2020/04/14/dissecting-rails-migrationsl.html
+# https://edgeguides.rubyonrails.org/active_record_migrations.html
+# https://docs.mongoengine.org/guide/migration.html
+# https://andrewlock.net/deploying-asp-net-core-applications-to-kubernetes-part-7-running-database-migrations/
+# https://helm.sh/docs/topics/charts_hooks/
diff --git a/jobs/mongodb_migration/src/mongodb_migration/migration.py b/jobs/mongodb_migration/src/mongodb_migration/migration.py
new file mode 100644
index 00000000..5ac444f5
--- /dev/null
+++ b/jobs/mongodb_migration/src/mongodb_migration/migration.py
@@ -0,0 +1,32 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 The HuggingFace Authors.
+import datetime
+from abc import ABC, abstractmethod
+
+
+class IrreversibleMigration(Exception):
+    pass
+
+
+class Migration(ABC):
+    def __init__(self, version: str, description: str):
+        if version is None or description is None:
+            raise ValueError("The version and the description are required.")
+        try:
+            datetime.datetime.strptime(version, "%Y%m%d%H%M%S")
+        except Exception as e:
+            raise ValueError("The version should be a string representing a date in the format YYYYMMDDHHMMSS") from e
+        self.version = version
+        self.description = description
+
+    @abstractmethod
+    def up(self) -> None:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def validate(self) -> None:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def down(self) -> None:
+        raise IrreversibleMigration()
diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221110230400_example.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221110230400_example.py
new file mode 100644
index 00000000..ca33eae3
--- /dev/null
+++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/_20221110230400_example.py
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 The HuggingFace Authors.
+ +import logging + +from mongodb_migration.migration import Migration + + +class MigrationExample(Migration): + def up(self) -> None: + logging.info("Example migration, upgrade step") + + def down(self) -> None: + logging.info("Example migration, downgrade step") + + def validate(self) -> None: + logging.info("Example migration, validation is OK") diff --git a/jobs/mongodb_migration/src/mongodb_migration/migrations/__init__.py b/jobs/mongodb_migration/src/mongodb_migration/migrations/__init__.py new file mode 100644 index 00000000..1e9d0c5a --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/migrations/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. diff --git a/jobs/mongodb_migration/src/mongodb_migration/plan.py b/jobs/mongodb_migration/src/mongodb_migration/plan.py new file mode 100644 index 00000000..b0012836 --- /dev/null +++ b/jobs/mongodb_migration/src/mongodb_migration/plan.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging +from typing import List + +from mongodb_migration.database_migrations import DatabaseMigration +from mongodb_migration.migration import Migration + + +class SavedMigrationsError(Exception): + pass + + +class Plan: + collected_migrations: List[Migration] + executed_migrations: List[Migration] + + def __init__(self, collected_migrations: List[Migration]): + self.collected_migrations = collected_migrations + self.executed_migrations = [] + + def get_saved_migrations_versions(self) -> List[str]: + return DatabaseMigration.objects().distinct("version") + + def get_planned_migrations(self) -> List[Migration]: + saved_migrations_versions = sorted(self.get_saved_migrations_versions()) + collected_migrations = sorted(self.collected_migrations, key=lambda m: m.version) + first_collected_migrations_versions = [ + migration.version for migration in collected_migrations[: len(saved_migrations_versions)] + ] + if saved_migrations_versions != first_collected_migrations_versions: + logging.error( + "Database migrations are not in sync with collected migrations. Database:" + f" {saved_migrations_versions}, Collected: {first_collected_migrations_versions}" + ) + raise SavedMigrationsError( + "The saved migrations in the database should be the first collected migrations." + ) + num_saved_migrations = len(saved_migrations_versions) + num_collected_migrations = len(collected_migrations) + if not num_collected_migrations: + logging.error("No collected migrations") + if num_saved_migrations: + logging.info(f"{num_saved_migrations} migrations have already been applied. 
They will be skipped.") + if num_saved_migrations == len(collected_migrations): + logging.info("All migrations have already been applied.") + return collected_migrations[num_saved_migrations:] + + def execute(self): + try: + self.apply() + except Exception as e: + logging.error(f"Migration failed: {e}") + self.rollback() + + def apply(self): + logging.info("Start migrations") + self.executed_migrations = [] + for migration in self.get_planned_migrations(): + self.executed_migrations.append(migration) + logging.info(f"Migrate {migration.version}: add to the migrations collection") + self.save(migration) + logging.info(f"Migrate {migration.version}: apply") + migration.up() + logging.info(f"Migrate {migration.version}: validate") + migration.validate() + logging.info(f"Migrate {migration.version}: done") + logging.info("All migrations have been applied") + + def rollback(self): + logging.info("Start rollback") + try: + while self.executed_migrations: + migration = self.executed_migrations[-1] + logging.info(f"Rollback {migration.version}: roll back") + migration.down() + logging.info(f"Rollback {migration.version}: removed from the migrations collection") + self.remove(migration) + logging.info(f"Rollback {migration.version}: done") + self.executed_migrations.pop() + logging.info("All executed migrations have been rolled back") + except Exception as e: + logging.error( + f"Rollback failed: {e}. The database is in an inconsistent state. Try to restore the backup manually." + ) + raise e + + def save(self, migration: Migration): + DatabaseMigration(version=migration.version, description=migration.description).save() + + def remove(self, migration: Migration) -> None: + DatabaseMigration.objects(version=migration.version).delete() diff --git a/jobs/mongodb_migration/src/mongodb_migration/py.typed b/jobs/mongodb_migration/src/mongodb_migration/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/jobs/mongodb_migration/tests/__init__.py b/jobs/mongodb_migration/tests/__init__.py new file mode 100644 index 00000000..1e9d0c5a --- /dev/null +++ b/jobs/mongodb_migration/tests/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. diff --git a/jobs/mongodb_migration/tests/conftest.py b/jobs/mongodb_migration/tests/conftest.py new file mode 100644 index 00000000..14fea96f --- /dev/null +++ b/jobs/mongodb_migration/tests/conftest.py @@ -0,0 +1,29 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from pytest import MonkeyPatch, fixture + +from mongodb_migration.config import JobConfig + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + monkeypatch_session.setenv("MONGODB_MIGRATION_MONGO_DATABASE", "datasets_server_maintenance_test") + yield monkeypatch_session + monkeypatch_session.undo() + + +@fixture(scope="session", autouse=True) +def app_config(monkeypatch_session: MonkeyPatch) -> JobConfig: + job_config = JobConfig() + if ( + "test" not in job_config.cache.mongo_database + or "test" not in job_config.queue.mongo_database + or "test" not in job_config.mongodb_migration.mongo_database + ): + raise ValueError("Test must be launched on a test mongo database") + return job_config diff --git a/jobs/mongodb_migration/tests/test_collector.py b/jobs/mongodb_migration/tests/test_collector.py new file mode 100644 index 00000000..2c425a0a --- /dev/null +++ b/jobs/mongodb_migration/tests/test_collector.py @@ -0,0 +1,12 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from mongodb_migration.collector import MigrationsCollector + + +def test_collector(): + collector = MigrationsCollector() + migrations = collector.get_migrations() + assert len(migrations) == 1 + assert migrations[0].version == "20221110230400" + assert migrations[0].description == "example" diff --git a/jobs/mongodb_migration/tests/test_migration.py b/jobs/mongodb_migration/tests/test_migration.py new file mode 100644 index 00000000..8790bf80 --- /dev/null +++ b/jobs/mongodb_migration/tests/test_migration.py @@ -0,0 +1,45 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from typing import Optional, Type + +import pytest + +from mongodb_migration.migration import Migration + + +class MigrationOK(Migration): + def up(self) -> None: + pass + + def down(self) -> None: + pass + + def validate(self) -> None: + pass + + +version_ok = "20221110230400" +description = "description a" +version_date_error = "20225510230400" +version_format_error = "wrong format" +version_too_short = "20221110" + + [email protected]( + "version,description,exception", + [ + (version_ok, None, ValueError), + (None, description, ValueError), + (version_date_error, description, ValueError), + (version_format_error, description, ValueError), + (version_too_short, description, ValueError), + (version_ok, description, None), + ], +) +def test_migration(version: str, description: str, exception: Optional[Type[Exception]]): + if exception is None: + MigrationOK(version=version, description=description) + else: + with pytest.raises(exception): + MigrationOK(version=version, description=description) diff --git a/jobs/mongodb_migration/tests/test_plan.py b/jobs/mongodb_migration/tests/test_plan.py new file mode 100644 index 00000000..c7503ffc --- /dev/null +++ b/jobs/mongodb_migration/tests/test_plan.py @@ -0,0 +1,263 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import datetime +from typing import List, Optional, Type + +import pytest +from libcache.simple_cache import SplitsResponse +from libqueue.queue import Job, Status + +from mongodb_migration.check import check_documents +from mongodb_migration.database_migrations import ( + DatabaseMigration, + _clean_maintenance_database, +) +from mongodb_migration.migration import IrreversibleMigration, Migration +from mongodb_migration.plan import Plan, SavedMigrationsError + + [email protected](autouse=True) +def clean_mongo_database() -> None: + _clean_maintenance_database() + + +class MigrationOK(Migration): + def up(self) -> None: + pass + + def down(self) -> None: + pass + + def validate(self) -> None: + pass + + +class MigrationErrorInUp(Migration): + def up(self) -> None: + raise RuntimeError("Error in up") + + def down(self) -> None: + pass + + def validate(self) -> None: + pass + + +class MigrationErrorInValidate(Migration): + def up(self) -> None: + pass + + def down(self) -> None: + pass + + def validate(self) -> None: + raise RuntimeError("Error in validation") + + +class MigrationErrorInUpAndDown(Migration): + def up(self) -> None: + raise RuntimeError("Error in up") + + def down(self) -> None: + raise RuntimeError("Error in down") + + def validate(self) -> None: + pass + + +class MigrationErrorIrreversible(Migration): + def up(self) -> None: + raise RuntimeError("Error in up") + + def down(self) -> None: + raise IrreversibleMigration("Error in down") + + def validate(self) -> None: + pass + + +def test_empty_plan(): + plan = Plan(collected_migrations=[]) + + assert plan.collected_migrations == [] + plan.execute() + assert plan.executed_migrations == [] + + +migration_ok_a = MigrationOK(version="20221110230400", description="ok a") +migration_ok_b = MigrationOK(version="20221110230401", description="ok b") +migration_error_in_up = MigrationErrorInUp(version="20221110230402", description="error in up") +migration_error_in_validate = MigrationErrorInValidate(version="20221110230403", description="error in validate") +migration_error_in_up_and_down = MigrationErrorInUpAndDown( + version="20221110230404", description="error in up and down" +) +migration_error_irreversible = MigrationErrorIrreversible( + version="20221110230405", description="error because migration is irreversible" +) + + [email protected]( + "collected_migrations", + ( + [migration_ok_a, migration_ok_b], + [migration_ok_b, migration_ok_a], + ), +) +def test_collected_migrations_order_dont_matter(collected_migrations: List[Migration]): + assert DatabaseMigration.objects.distinct("version") == [] + plan = Plan(collected_migrations=collected_migrations) + assert plan.executed_migrations == [] + plan.execute() + sorted_migrations = sorted(collected_migrations, key=lambda migration: migration.version) + assert plan.executed_migrations == sorted_migrations + assert DatabaseMigration.objects.distinct("version") == [migration.version for migration in sorted_migrations] + + [email protected]( + "collected_migrations,executed_migrations,exception", + [ + ([migration_error_in_up], [], None), + ([migration_error_in_validate], [], None), + ([migration_error_in_up_and_down], [migration_error_in_up_and_down], RuntimeError), + ([migration_error_irreversible], [migration_error_irreversible], IrreversibleMigration), + ([migration_ok_a, migration_error_in_up], [], None), + ( + [migration_ok_a, migration_error_in_up_and_down], + [migration_ok_a, migration_error_in_up_and_down], + RuntimeError, + ), + ], +) +def test_errors_in_migration_steps( + 
collected_migrations: List[Migration], executed_migrations: List[Migration], exception: Optional[Type[Exception]] +): + assert DatabaseMigration.objects.distinct("version") == [] + plan = Plan(collected_migrations=collected_migrations) + assert plan.executed_migrations == [] + if exception is None: + # rollback worked + plan.execute() + else: + # rollback failed + with pytest.raises(exception): + plan.execute() + assert plan.executed_migrations == executed_migrations + assert DatabaseMigration.objects.distinct("version") == [migration.version for migration in executed_migrations] + + [email protected]( + "previous_migrations,collected_migrations,executed_migrations,exception", + [ + ([], [], [], None), + ([], [migration_ok_a], [migration_ok_a], None), + ([migration_ok_a], [migration_ok_a, migration_ok_b], [migration_ok_b], None), + # the previous migrations must be in the collected migrations + ([migration_ok_a], [], [], SavedMigrationsError), + ([migration_ok_a], [migration_ok_b], [], SavedMigrationsError), + # error with the versions order + ([migration_ok_b], [migration_ok_a, migration_ok_b], [], SavedMigrationsError), + ], +) +def test_get_planned_migrations( + previous_migrations: List[Migration], + collected_migrations: List[Migration], + executed_migrations: List[Migration], + exception: Optional[Type[Exception]], +): + for migration in previous_migrations: + DatabaseMigration(version=migration.version, description=migration.description).save() + assert DatabaseMigration.objects.distinct("version") == [migration.version for migration in previous_migrations] + plan = Plan(collected_migrations=collected_migrations) + assert plan.executed_migrations == [] + if exception is None: + # up worked + plan.apply() + else: + # up failed + with pytest.raises(exception): + plan.apply() + assert plan.executed_migrations == executed_migrations + assert DatabaseMigration.objects.distinct("version") == [ + migration.version for migration in (previous_migrations + executed_migrations) + ] + + +def test_internal_operations_are_idempotent(): + plan = Plan(collected_migrations=[migration_ok_a, migration_ok_b]) + plan.rollback() + plan.rollback() + plan.rollback() + plan.apply() + plan.apply() + plan.apply() + plan.apply() + plan.rollback() + plan.apply() + plan.rollback() + + +def test_execute_is_idempotent(): + plan = Plan(collected_migrations=[migration_ok_a, migration_ok_b]) + plan.execute() + plan.execute() + Plan(collected_migrations=[migration_ok_a, migration_ok_b]).execute() + + +def test_queue_and_cache(): + # prepare + for i in range(100): + Job( + type="queue_a", + dataset=f"dataset{i}", + config="config", + split="split", + unicity_id=f"abc{str(i)}", + namespace="dataset", + created_at=datetime.datetime.now(), + status=Status.WAITING, + ).save() + # Remove the field "stale", to simulate that we add it now + splits_response_collection = SplitsResponse._get_collection() + splits_response_collection.update_many({}, {"$unset": {"stale": False}}) + + class MigrationQueue(Migration): + def up(self) -> None: + job_collection = Job._get_collection() + job_collection.update_many({}, {"$set": {"status": Status.CANCELLED.value}}) + + def down(self) -> None: + raise IrreversibleMigration() + + def validate(self) -> None: + def custom_validation(doc: Job) -> None: + if doc.status != Status.CANCELLED: + raise ValueError("status is not cancelled") + + check_documents(DocCls=Job, sample_size=10, custom_validation=custom_validation) + if Job.objects(unicity_id="abc0").count() != 1: + raise ValueError('Job 
"abc0" not found') + + class MigrationCache(Migration): + def up(self) -> None: + splits_response_collection = SplitsResponse._get_collection() + splits_response_collection.update_many({}, {"$set": {"stale": False}}) + + def down(self) -> None: + splits_response_collection = SplitsResponse._get_collection() + splits_response_collection.update_many({}, {"$unset": {"stale": False}}) + + def validate(self) -> None: + def custom_validation(doc: SplitsResponse) -> None: + if not hasattr(doc, "stale"): + raise ValueError("Missing field 'stale'") + + check_documents(DocCls=SplitsResponse, sample_size=10, custom_validation=custom_validation) + + plan = Plan( + collected_migrations=[ + MigrationQueue(version="20221114223000", description="cancel jobs"), + MigrationCache(version="20221114223001", description="add stale field"), + ] + ) + plan.execute() diff --git a/tools/DockerRemoteImages.mk b/tools/DockerRemoteImages.mk index c83de45d..b28d0ef0 100644 --- a/tools/DockerRemoteImages.mk +++ b/tools/DockerRemoteImages.mk @@ -2,2 +2,2 @@ export IMAGE_REVERSE_PROXY := $(shell jq -r '.dockerImage.reverseProxy' ${DOCKER -export IMAGE_ADMIN := $(shell jq -r '.dockerImage.services.admin' ${DOCKER_IMAGES}) -export IMAGE_API := $(shell jq -r '.dockerImage.services.api' ${DOCKER_IMAGES}) +export IMAGE_SERVICE_ADMIN := $(shell jq -r '.dockerImage.services.admin' ${DOCKER_IMAGES}) +export IMAGE_SERVICE_API := $(shell jq -r '.dockerImage.services.api' ${DOCKER_IMAGES}) diff --git a/tools/docker-compose-datasets-server-from-remote-images.yml b/tools/docker-compose-datasets-server-from-remote-images.yml index c180d315..12612786 100644 --- a/tools/docker-compose-datasets-server-from-remote-images.yml +++ b/tools/docker-compose-datasets-server-from-remote-images.yml @@ -21 +21 @@ services: - image: ${IMAGE_ADMIN?IMAGE_ADMIN env var must be provided} + image: ${IMAGE_SERVICE_ADMIN?IMAGE_SERVICE_ADMIN env var must be provided} @@ -51 +51 @@ services: - image: ${IMAGE_API?IMAGE_API env var must be provided} + image: ${IMAGE_SERVICE_API?IMAGE_SERVICE_API env var must be provided}
0a10dd7d79abd26019e38aa143d057a2f48a6b81
Sylvain Lesage
2022-11-08T19:29:02
ci: 🎡 remove the token for codecov since the repo is public (#633)
diff --git a/.github/workflows/_unit-tests-python.yml b/.github/workflows/_unit-tests-python.yml index b9e7ebcc..fe8757c7 100644 --- a/.github/workflows/_unit-tests-python.yml +++ b/.github/workflows/_unit-tests-python.yml @@ -55 +55,6 @@ jobs: - run: poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests + run: poetry run python -m pytest -s --cov=./ --cov-report=xml:./coverage.xml --cov-report=term tests + - name: Prepare codecov flag (slash "/" is not allowed) + id: remove-slash + run: | + dir="${{ inputs.working-directory }}" + echo "codecov_flag=${dir/\//_}" >> $GITHUB_ENV @@ -58 +63 @@ jobs: - token: ${{ secrets.codecov-token }} + working-directory: ${{ inputs.working-directory }} @@ -59,0 +65,2 @@ jobs: + fail_ci_if_error: true + flags: ${{ env.codecov_flag }}
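The new "Prepare codecov flag" step exists because codecov flag names may not contain a slash, while the reusable workflow receives working directories like jobs/mongodb_migration. The bash expansion ${dir/\//_} swaps the first slash for an underscore, which is enough here since the working directories in this repo are one level deep. The same sanitization, sketched in Python for clarity (illustrative only; the CI keeps using the bash expansion shown above):

def codecov_flag(working_directory: str) -> str:
    # replace the path separator so the flag satisfies codecov's naming rules
    return working_directory.replace("/", "_")


assert codecov_flag("jobs/mongodb_migration") == "jobs_mongodb_migration"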
f49e9e5ff41cfb7e36e77ec8c5eac8c12046ba0c
Mishig
2022-11-08T15:50:37
Update pr docs actions (#632)
diff --git a/.github/workflows/doc-pr-build.yml b/.github/workflows/doc-pr-build.yml index f962ada5..fe01c960 100644 --- a/.github/workflows/doc-pr-build.yml +++ b/.github/workflows/doc-pr-build.yml @@ -18 +18 @@ jobs: - uses: huggingface/doc-builder/.github/workflows/build_pr_documentation.yml@main + uses: huggingface/doc-builder/.github/workflows/build_pr_documentation.yml@use_hf_hub @@ -25 +25,2 @@ jobs: - token: ${{ secrets.HUGGINGFACE_PUSH }} + token: ${{ secrets.HF_DOC_PUSH }} + comment_bot_token: ${{ secrets.HUGGINGFACE_PUSH }} diff --git a/.github/workflows/doc-pr-delete.yml b/.github/workflows/doc-pr-delete.yml index a4a39a4a..b2426810 100644 --- a/.github/workflows/doc-pr-delete.yml +++ b/.github/workflows/doc-pr-delete.yml @@ -13 +13 @@ jobs: - uses: huggingface/doc-builder/.github/workflows/delete_doc_comment.yml@main + uses: huggingface/doc-builder/.github/workflows/delete_doc_comment.yml@use_hf_hub @@ -18 +18,3 @@ jobs: - token: ${{ secrets.HUGGINGFACE_PUSH }} + token: ${{ secrets.HF_DOC_PUSH }} + comment_bot_token: ${{ secrets.HUGGINGFACE_PUSH }} +
06b7d6b38a6121f281a81480c7ce2d2de9aa4d10
Sylvain Lesage
2022-10-27T13:27:21
feat: 🎸 change mongo indexes (following cloud recommendations) (#627)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index ca1dbb97..9d7ff154 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5,2 +5,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-06e3fab", - "api": "huggingface/datasets-server-services-api:sha-06e3fab" + "admin": "huggingface/datasets-server-services-admin:sha-a9824d5", + "api": "huggingface/datasets-server-services-api:sha-a9824d5" @@ -9,2 +9,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-06e3fab", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-06e3fab" + "splits": "huggingface/datasets-server-workers-splits:sha-a9824d5", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-a9824d5" diff --git a/docs/poetry.lock b/docs/poetry.lock index 0bb72faa..ea3d2e7b 100644 --- a/docs/poetry.lock +++ b/docs/poetry.lock @@ -3 +3 @@ name = "attrs" -version = "21.4.0" +version = "22.1.0" @@ -7 +7 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" @@ -10 +10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] @@ -12,2 +12,2 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] @@ -17 +17 @@ name = "black" -version = "22.3.0" +version = "22.10.0" @@ -21 +21 @@ optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" @@ -28 +28 @@ platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} @@ -50 +50 @@ name = "colorama" -version = "0.4.5" +version = "0.4.6" @@ -54 +54 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" @@ -58 +58 @@ name = "fastjsonschema" -version = "2.15.3" +version = "2.16.2" @@ -69 +69 @@ name = "flake8" -version = "4.0.1" +version = "5.0.4" @@ -73 +73 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.1" @@ -76,3 +76,3 @@ python-versions = ">=3.6" -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" @@ -120 +120 @@ name = "jsonschema" -version = "4.6.0" +version = "4.16.0" @@ -136 +136 @@ name = "jupyter-core" -version = "4.10.0" +version = "4.11.2" @@ -151 +151 @@ name = "mccabe" -version = "0.6.1" +version = 
"0.7.0" @@ -155 +155 @@ optional = false -python-versions = "*" +python-versions = ">=3.6" @@ -167 +167 @@ name = "nbformat" -version = "5.4.0" +version = "5.7.0" @@ -180 +180 @@ traitlets = ">=5.1" -test = ["check-manifest", "testpath", "pytest", "pre-commit"] +test = ["check-manifest", "pep440", "pre-commit", "pytest", "testpath"] @@ -195 +195 @@ name = "pathspec" -version = "0.9.0" +version = "0.10.1" @@ -199 +199 @@ optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" @@ -215 +215 @@ name = "pycodestyle" -version = "2.8.0" +version = "2.9.1" @@ -219 +219 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" @@ -223 +223 @@ name = "pyflakes" -version = "2.4.0" +version = "2.5.0" @@ -227 +227 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" @@ -274 +274 @@ name = "tqdm" -version = "4.64.0" +version = "4.64.1" @@ -291 +291 @@ name = "traitlets" -version = "5.3.0" +version = "5.5.0" @@ -297,0 +298 @@ python-versions = ">=3.7" +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] @@ -302 +303 @@ name = "typing-extensions" -version = "4.2.0" +version = "4.4.0" @@ -325,29 +326,2 @@ content-hash = "c604ab0487eae9671f12cfba89dc09542868ff1302fc5c9e4c1be27a8e29b578 -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] -black = [ - {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, - {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, - {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, - {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, - {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, - {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, - {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, - {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, - {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, - {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, - {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, - 
{file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, - {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, - {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, - {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, - {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, - {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, - {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, - {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, -] +attrs = [] +black = [] @@ -358,12 +332,3 @@ click = [ -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] -fastjsonschema = [ - {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, - {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, -] -flake8 = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, -] +colorama = [] +fastjsonschema = [] +flake8 = [] @@ -378,12 +343,3 @@ isort = [ -jsonschema = [ - {file = "jsonschema-4.6.0-py3-none-any.whl", hash = "sha256:1c92d2db1900b668201f1797887d66453ab1fbfea51df8e4b46236689c427baf"}, - {file = "jsonschema-4.6.0.tar.gz", hash = "sha256:9d6397ba4a6c0bf0300736057f649e3e12ecbc07d3e81a0dacb72de4e9801957"}, -] -jupyter-core = [ - {file = "jupyter_core-4.10.0-py3-none-any.whl", hash = "sha256:e7f5212177af7ab34179690140f188aa9bf3d322d8155ed972cbded19f55b6f3"}, - {file = "jupyter_core-4.10.0.tar.gz", hash = "sha256:a6de44b16b7b31d7271130c71a6792c4040f077011961138afed5e5e73181aec"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] +jsonschema = [] +jupyter-core = [] +mccabe = [] @@ -394,4 +350 @@ mypy-extensions = [ -nbformat = [ - {file = "nbformat-5.4.0-py3-none-any.whl", hash = "sha256:0d6072aaec95dddc39735c144ee8bbc6589c383fb462e4058abc855348152dad"}, - {file = "nbformat-5.4.0.tar.gz", hash = "sha256:44ba5ca6acb80c5d5a500f1e5b83ede8cbe364d5a495c4c8cf60aaf1ba656501"}, -] +nbformat = [] @@ -402,4 +355 @@ 
packaging = [ -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] +pathspec = [] @@ -410,8 +360,2 @@ platformdirs = [ -pycodestyle = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, -] -pyflakes = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, -] +pycodestyle = [] +pyflakes = [] @@ -500,12 +444,3 @@ tomli = [ -tqdm = [ - {file = "tqdm-4.64.0-py2.py3-none-any.whl", hash = "sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6"}, - {file = "tqdm-4.64.0.tar.gz", hash = "sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d"}, -] -traitlets = [ - {file = "traitlets-5.3.0-py3-none-any.whl", hash = "sha256:65fa18961659635933100db8ca120ef6220555286949774b9cfc106f941d1c7a"}, - {file = "traitlets-5.3.0.tar.gz", hash = "sha256:0bb9f1f9f017aa8ec187d8b1b2a7a6626a2a1d877116baba52a129bfa124f8e2"}, -] -typing-extensions = [ - {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, - {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, -] +tqdm = [] +traitlets = [] +typing-extensions = [] diff --git a/libs/libcache/dist/libcache-0.3.4-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.4-py3-none-any.whl new file mode 100644 index 00000000..871da54d Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.4-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.3.4.tar.gz b/libs/libcache/dist/libcache-0.3.4.tar.gz new file mode 100644 index 00000000..f6ba3fae Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.4.tar.gz differ diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml index b6a984dd..43f617c2 100644 --- a/libs/libcache/pyproject.toml +++ b/libs/libcache/pyproject.toml @@ -5 +5 @@ name = "libcache" -version = "0.3.3" +version = "0.3.4" diff --git a/libs/libcache/src/libcache/simple_cache.py b/libs/libcache/src/libcache/simple_cache.py index ff59845f..3c63e531 100644 --- a/libs/libcache/src/libcache/simple_cache.py +++ b/libs/libcache/src/libcache/simple_cache.py @@ -68,2 +67,0 @@ class SplitsResponse(Document): - "http_status", - "stale", @@ -71 +69 @@ class SplitsResponse(Document): - ("error_code", "http_status"), + ("dataset_name", "-updated_at"), @@ -101 +99 @@ class FirstRowsResponse(Document): - ("error_code", "http_status"), + ("dataset_name", "-updated_at"), diff --git a/libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl new file mode 100644 index 00000000..c3f41386 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.8.tar.gz b/libs/libqueue/dist/libqueue-0.4.8.tar.gz new file mode 100644 index 00000000..5853480c Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.8.tar.gz differ diff --git 
a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index 6aad1231..8908d83a 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -5 +5 @@ name = "libqueue" -version = "0.4.7" +version = "0.4.8" diff --git a/libs/libqueue/src/libqueue/queue.py b/libs/libqueue/src/libqueue/queue.py index 55c536a8..c7a8e501 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libqueue/src/libqueue/queue.py @@ -109,2 +109 @@ class Job(Document): - "collection": "jobs_blue", - # ^ https://en.wikipedia.org/wiki/Blue-green_deployment + "collection": "jobsBlue", @@ -116,0 +116,2 @@ class Job(Document): + ("status", "type", "created_at", "namespace"), + "-created_at", diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 54b409ef..037508d9 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -307 +307 @@ name = "libcache" -version = "0.3.3" +version = "0.3.4" @@ -322 +322 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" @@ -342 +342 @@ name = "libqueue" -version = "0.4.7" +version = "0.4.8" @@ -358 +358 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" @@ -863 +863 @@ python-versions = "3.9.6" -content-hash = "62ccd2d3d62a9c5f2bdafe5472d7a55439dbe5a295063fb50f85fb1de8ec6152" +content-hash = "e3da52de7c61f34cce97d1fc7975a6a757547cb6b6932a016b4b261a49befb2d" @@ -924 +924 @@ libcache = [ - {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, + {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, @@ -930 +930 @@ libqueue = [ - {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, + {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index f9293363..608827de 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -11 +11 @@ huggingface-hub = "^0.8.1" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 0eac8206..6f95b2b5 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -323 +323 @@ name = "libcache" -version = "0.3.3" +version = "0.3.4" @@ -338 +338 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" @@ -358 +358 @@ name = "libqueue" -version = "0.4.7" +version = "0.4.8" @@ -374 +374 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" @@ -905 +905 @@ python-versions = "3.9.6" -content-hash = 
"50b8e3501d30bea0fbcc237aebe4db07cbfa1556ba9107880e6a811b4b4101f0" +content-hash = "1231a8dff4cb257231da7de69a8b0ec09597a935dc8edf207470b39c3f590adb" @@ -964 +964 @@ libcache = [ - {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, + {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, @@ -970 +970 @@ libqueue = [ - {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, + {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index cd6c434d..892564e1 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -11 +11 @@ jsonschema = "^4.16.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index e1a66e23..26b06c36 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -359 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -365 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", 
"pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -823 +823 @@ name = "libcache" -version = "0.3.3" +version = "0.3.4" @@ -838 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" @@ -866 +866 @@ name = "libqueue" -version = "0.4.7" +version = "0.4.8" @@ -882 +882 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" @@ -2294 +2294 @@ python-versions = "3.9.6" -content-hash = "296995eaabadcadd118c7fed822ba929ca4b36add4e423c9228e5b3147158c3d" +content-hash = "d4b3c0e736a68bc46a6e39d50054af6c5d5424952ec118fb9097f6f89aae60f8" @@ -2624 +2624 @@ libcache = [ - {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, + {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, @@ -2631 +2631 @@ libqueue = [ - {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, + {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 562e8978..1cadae71 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -19 +19 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false } diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 
e1a66e23..d8327665 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -823 +823 @@ name = "libcache" -version = "0.3.3" +version = "0.3.4" @@ -838 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl" @@ -866 +866 @@ name = "libqueue" -version = "0.4.7" +version = "0.4.8" @@ -882 +882 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl" @@ -2294 +2294 @@ python-versions = "3.9.6" -content-hash = "296995eaabadcadd118c7fed822ba929ca4b36add4e423c9228e5b3147158c3d" +content-hash = "d4b3c0e736a68bc46a6e39d50054af6c5d5424952ec118fb9097f6f89aae60f8" @@ -2624 +2624 @@ libcache = [ - {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, + {file = "libcache-0.3.4-py3-none-any.whl", hash = "sha256:efd9a0a3912dd71d60f174e0238e49af377e87088349c5fb87fef9126f83641b"}, @@ -2631 +2631 @@ libqueue = [ - {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, + {file = "libqueue-0.4.8-py3-none-any.whl", hash = "sha256:ea2376efb371dce391f4574d8a9595e99e20b318a3399e910d32251676f5cb24"}, diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index ec351e91..0546794e 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -19 +19 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.4-py3-none-any.whl", develop = false } @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.8-py3-none-any.whl", develop = false }
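Editor's note on the index changes in this commit: the per-field `http_status` and `stale` indexes are dropped and the `("error_code", "http_status")` compound index is replaced by `("dataset_name", "-updated_at")`. As a rough illustration of how such indexes are declared in mongoengine, here is a minimal sketch; the class, field, and collection names are simplified stand-ins, not the exact `libcache` schema:

```python
# Minimal sketch, assuming mongoengine is installed and a connection is
# registered elsewhere. In the "indexes" meta list, a plain string declares a
# single-field ascending index, and a tuple declares a compound index; a
# leading "-" on a field name makes that component descending.
from mongoengine import DateTimeField, Document, StringField


class SplitsResponseSketch(Document):
    dataset_name = StringField(required=True)
    updated_at = DateTimeField()

    meta = {
        "collection": "splitsResponse",  # illustrative collection name
        "indexes": [
            "dataset_name",  # single-field ascending index
            ("dataset_name", "-updated_at"),  # compound, descending on updated_at
        ],
    }
```

The compound index matches the query pattern of listing a dataset's cache entries by most recent update, which is presumably what the cloud recommendations pointed at.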
f35071fd76e9b7f48a34a3a9928d88d070b89171
Sylvain Lesage
2022-10-27T12:28:42
Limit the started jobs per "dataset namespace" (#626)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index c4eab53f..ca1dbb97 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5,2 +5,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-8801495", - "api": "huggingface/datasets-server-services-api:sha-c42fe9a" + "admin": "huggingface/datasets-server-services-admin:sha-06e3fab", + "api": "huggingface/datasets-server-services-api:sha-06e3fab" @@ -9,2 +9,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-8801495", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-8801495" + "splits": "huggingface/datasets-server-workers-splits:sha-06e3fab", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-06e3fab" diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index a7839acd..267fe7a8 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -149,2 +149,2 @@ splits: - # Maximum number of jobs running at the same time for the same dataset - maxJobsPerDataset: 4 + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 4 @@ -166,2 +166,2 @@ firstRows: - # Maximum number of jobs running at the same time for the same dataset - maxJobsPerDataset: 17 + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 17 diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index a6497910..d36bb62d 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -23,2 +23,2 @@ - - name: QUEUE_MAX_JOBS_PER_DATASET - # value: {{ .Values.queue.maxJobsPerDataset | quote }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} @@ -26 +26 @@ - value: {{ .Values.firstRows.queue.maxJobsPerDataset | quote }} + value: {{ .Values.firstRows.queue.maxJobsPerNamespace | quote }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 6e28763a..fe473b83 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -23,2 +23,2 @@ - - name: QUEUE_MAX_JOBS_PER_DATASET - # value: {{ .Values.queue.maxJobsPerDataset | quote }} + - name: QUEUE_MAX_JOBS_PER_NAMESPACE + # value: {{ .Values.queue.maxJobsPerNamespace | quote }} @@ -26 +26 @@ - value: {{ .Values.splits.queue.maxJobsPerDataset | quote }} + value: {{ .Values.splits.queue.maxJobsPerNamespace | quote }} diff --git a/chart/values.yaml b/chart/values.yaml index f025d025..c1bc92ea 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -43,2 +43,2 @@ queue: - # Maximum number of jobs running at the same time for the same dataset - maxJobsPerDataset: 1 + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 @@ -169,2 +169,2 @@ splits: - # Maximum number of jobs running at the same time for the same dataset - maxJobsPerDataset: 1 + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 @@ -194,2 +194,2 @@ firstRows: - # Maximum number of jobs running at the same time for the same dataset - maxJobsPerDataset: 1 + # Maximum number of jobs running at the same time for the same namespace + maxJobsPerNamespace: 1 diff --git a/docs/source/server.mdx b/docs/source/server.mdx index 6421a173..da02dfde 100644 --- a/docs/source/server.mdx +++ b/docs/source/server.mdx @@ -5,2 +5,2 @@ The Datasets Server has two main components 
that work together to return queries -* a user-facing web API for exploring and returning information about a dataset -* a server runs the queries ahead of time and caches them in a database +- a user-facing web API for exploring and returning information about a dataset +- a server that runs the queries ahead of time and caches them in a database @@ -14 +14 @@ There are three elements that keep the server running: the job queue, workers, a -The job queue is a list of jobs stored in a Mongo database that should be completed by the workers. The jobs are practically identical to the endpoints the user uses; only the server runs the jobs ahead of time, and the user gets the results when they use the endpoint. +The job queue is a list of jobs stored in a Mongo database that should be completed by the workers. The jobs are practically identical to the endpoints the user uses; only the server runs the jobs ahead of time, and the user gets the results when they use the endpoint. @@ -25 +25 @@ You might've noticed the `/valid` and `/is-valid` endpoints don't have a job in -Workers are responsible for executing the jobs in the queue. They complete the actual preprocessing requests, such as getting a list of splits and configurations. The workers can be controlled by configurable environment variables, like the minimum or the maximum number of rows returned by a worker or the maximum number of jobs to start per dataset. +Workers are responsible for executing the jobs in the queue. They complete the actual preprocessing requests, such as getting a list of splits and configurations. The workers can be controlled by configurable environment variables, like the minimum or the maximum number of rows returned by a worker or the maximum number of jobs to start per namespace (dataset user or organization). @@ -31 +31 @@ Take a look at the [workers configuration](https://github.com/huggingface/datase -Once the workers complete a job, the results are stored - or *cached* - in a Mongo database. When a user makes a request with an endpoint like `/first-rows`, Datasets Server retrieves the preprocessed response from the cache, and serves it to the user. This eliminates the time a user would've waited if the server hadn't already completed the job and stored the response. +Once the workers complete a job, the results are stored - or _cached_ - in a Mongo database. When a user makes a request with an endpoint like `/first-rows`, Datasets Server retrieves the preprocessed response from the cache, and serves it to the user. This eliminates the time a user would've waited if the server hadn't already completed the job and stored the response. diff --git a/libs/libqueue/README.md b/libs/libqueue/README.md index 6ef2c40e..4f00eb04 100644 --- a/libs/libqueue/README.md +++ b/libs/libqueue/README.md @@ -9 +9 @@ Set environment variables to configure the following aspects: -- `QUEUE_MAX_JOBS_PER_DATASET`: the maximum number of started jobs for the same dataset. Defaults to 1. +- `QUEUE_MAX_JOBS_PER_NAMESPACE`: the maximum number of started jobs for the same namespace (the user or organization, before the `/` separator in the dataset name, or the "canonical" dataset name if not present). Defaults to 1.
diff --git a/libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl new file mode 100644 index 00000000..871c198f Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.7.tar.gz b/libs/libqueue/dist/libqueue-0.4.7.tar.gz new file mode 100644 index 00000000..06f35d3f Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.7.tar.gz differ diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index 183d31de..6aad1231 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -5 +5 @@ name = "libqueue" -version = "0.4.6" +version = "0.4.7" @@ -33,0 +34 @@ requires = ["poetry-core>=1.0.0"] +# addopts = "-k 'wip'" @@ -34,0 +36,3 @@ filterwarnings = ["ignore::DeprecationWarning"] +markers = [ + "wip: tests being developed" +] diff --git a/libs/libqueue/src/libqueue/config.py b/libs/libqueue/src/libqueue/config.py index 45056a77..7be56966 100644 --- a/libs/libqueue/src/libqueue/config.py +++ b/libs/libqueue/src/libqueue/config.py @@ -10 +10 @@ class QueueConfig: - max_jobs_per_dataset: int + max_jobs_per_namespace: int @@ -22 +22 @@ class QueueConfig: - self.max_jobs_per_dataset = env.int(name="MAX_JOBS_PER_DATASET", default=1) + self.max_jobs_per_namespace = env.int(name="MAX_JOBS_PER_NAMESPACE", default=1) diff --git a/libs/libqueue/src/libqueue/queue.py b/libs/libqueue/src/libqueue/queue.py index a9157d94..55c536a8 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libqueue/src/libqueue/queue.py @@ -6,0 +7 @@ import types +from collections import Counter @@ -7,0 +9,2 @@ from datetime import datetime, timezone +from itertools import groupby +from operator import itemgetter @@ -47,0 +51,2 @@ class JobDict(TypedDict): + unicity_id: str + namespace: str @@ -93,0 +99,3 @@ class Job(Document): + unicity_id (`str`): A string that identifies the job uniquely. Only one job with the same unicity_id can be in + the started state. + namespace (`str`): The dataset namespace (user or organization) if any, else the dataset name (canonical name). 
@@ -101 +109,2 @@ class Job(Document): - "collection": "jobs", + "collection": "jobs_blue", + # ^ https://en.wikipedia.org/wiki/Blue-green_deployment @@ -113,0 +123,3 @@ class Job(Document): + unicity_id = StringField(required=True) + namespace = StringField(required=True) + split = StringField() @@ -124,0 +137,2 @@ class Job(Document): + "unicity_id": self.unicity_id, + "namespace": self.namespace, @@ -131,3 +144,0 @@ class Job(Document): - def to_id(self) -> str: - return f"Job[{self.type}][{self.dataset}][{self.config}][{self.split}]" - @@ -144,3 +155,3 @@ class Queue: - - a job is identified by its input arguments: dataset, and optionally config and split - - a job can be in one of the following states: waiting, started, success, error, cancelled - - a job can be in the queue only once in the "started" state + - a job is identified by its input arguments: unicity_id (type, dataset, config and split) + - a job can be in one of the following states: waiting, started, success, error, cancelled, skipped + - a job can be in the queue only once (unicity_id) in the "started" state @@ -149,2 +160,2 @@ class Queue: - - datasets that already have started jobs are de-prioritized - - datasets cannot have more than `max_jobs_per_dataset` started jobs + - datasets and users that already have started jobs are de-prioritized (using namespace) + - no more than `max_jobs_per_namespace` started jobs can exist for the same namespace @@ -154,2 +165,4 @@ class Queue: - max_jobs_per_dataset (`int`): Maximum number of started jobs for the same dataset. 0 or a negative value - are ignored. Defaults to None. + max_jobs_per_namespace (`int`): Maximum number of started jobs for the same namespace. We call a namespace the + part of the dataset name that is before the `/` separator (user or organization). If `/` is not present, + which is the case for the "canonical" datasets, the namespace is the dataset name. + 0 or a negative value are ignored. Defaults to None. @@ -158 +171 @@ class Queue: - def __init__(self, type: str, max_jobs_per_dataset: Optional[int] = None): + def __init__(self, type: str, max_jobs_per_namespace: Optional[int] = None): @@ -160,2 +173,2 @@ class Queue: - self.max_jobs_per_dataset = ( - None if max_jobs_per_dataset is None or max_jobs_per_dataset < 1 else max_jobs_per_dataset + self.max_jobs_per_namespace = ( + None if max_jobs_per_namespace is None or max_jobs_per_namespace < 1 else max_jobs_per_namespace @@ -173,0 +187,2 @@ class Queue: + unicity_id=f"Job[{self.type}][{dataset}][{config}][{split}]", + namespace=dataset.split("/")[0], @@ -178,2 +193,2 @@ class Queue: - def start_job(self) -> Tuple[str, str, Optional[str], Optional[str]]: - """Start the next job in the queue. + def get_next_waiting_job(self) -> Job: + """Get the next job in the queue. @@ -181,6 +196,5 @@ class Queue: - Get the next job in the queue, among the datasets that still have no started job. - If no job is available, get the next job in the queue, among the datasets that already have a started job, - but not more than `max_jobs_per_dataset` jobs per dataset, and not with the same set of arguments - (dataset, config, split). - - The job is moved from the waiting state to the started state. + Get the waiting job with the oldest creation date: + - first, among the datasets that still have no started job. + - if none, among the datasets that have the least started jobs: + - in the limit of `max_jobs_per_namespace` jobs per namespace + - ensuring that the unicity_id field is unique among the started jobs. 
@@ -189,2 +203 @@ class Queue: - EmptyQueueError: if there is no job in the queue, within the limit of the maximum number of started jobs - for a dataset + EmptyQueueError: if there is no waiting job in the queue that satisfies the restrictions above. @@ -192 +205 @@ class Queue: - Returns: the job id and the input arguments: dataset, config and split + Returns: the job @@ -194,6 +207,3 @@ class Queue: - # try to get a job for a dataset that still has no started job - started_job_arguments = { - (job.dataset, job.config, job.split) - for job in Job.objects(type=self.type, status=Status.STARTED).only("dataset", "config", "split") - } - started_datasets = [job_arguments[0] for job_arguments in started_job_arguments] + started_jobs = Job.objects(type=self.type, status=Status.STARTED) + started_job_namespaces = [job.namespace for job in started_jobs.only("namespace")] + @@ -201 +211,5 @@ class Queue: - Job.objects(type=self.type, status=Status.WAITING, dataset__nin=started_datasets) + Job.objects( + type=self.type, + status=Status.WAITING, + namespace__nin=set(started_job_namespaces), + ) @@ -202,0 +217 @@ class Queue: + .only("dataset", "config", "split") @@ -207,13 +222,28 @@ class Queue: - if next_waiting_job is None: - # the waiting jobs are all for datasets that already have started jobs. - # let's take the next one, in the limit of max_jobs_per_dataset - # and without the same arguments (dataset, config, split) - excluded_datasets = ( - [] - if self.max_jobs_per_dataset is None - else list( - { - dataset - for dataset in started_datasets - if started_datasets.count(dataset) >= self.max_jobs_per_dataset - } + if next_waiting_job is not None: + return next_waiting_job + + # all the waiting jobs, if any, are for namespaces that already have started jobs. 
+ # + # Let's: + # - exclude the waiting jobs for datasets that already have too many started jobs (max_jobs_per_namespace) + # - exclude the waiting jobs whose unicity_id is already in a started job + # and, among the remaining waiting jobs, let's: + # - select the oldest waiting job for the namespace with the least number of started jobs + started_unicity_ids = {job.unicity_id for job in started_jobs.only("unicity_id")} + descending_frequency_namespace_counts = [ + [namespace, count] + for namespace, count in Counter(started_job_namespaces).most_common() + if self.max_jobs_per_namespace is None or count < self.max_jobs_per_namespace + ] + descending_frequency_namespace_groups = [ + [item[0] for item in data] for (_, data) in groupby(descending_frequency_namespace_counts, itemgetter(1)) + ] + # maybe we could get rid of this loop + while descending_frequency_namespace_groups: + least_common_namespaces_group = descending_frequency_namespace_groups.pop() + next_waiting_job = ( + Job.objects( + type=self.type, + status=Status.WAITING, + namespace__in=least_common_namespaces_group, + unicity_id__nin=started_unicity_ids, @@ -221,4 +250,0 @@ class Queue: - ) - # probably sub-optimal: ideally we should not loop here, neither create a list of all the waiting jobs - waiting_jobs = list( - Job.objects(type=self.type, status=Status.WAITING, dataset__nin=excluded_datasets) @@ -225,0 +252 @@ + .only("dataset", "config", "split") @@ -226,0 +254 @@ + .first() @@ -228,12 +256,19 @@ - while waiting_jobs: - next_waiting_job = waiting_jobs.pop(0) - if ( - next_waiting_job.dataset, - next_waiting_job.config, - next_waiting_job.split, - ) not in started_job_arguments: - break - else: - raise EmptyQueueError( - "no job available (within the limit of {max_jobs_per_dataset} started jobs per dataset)" - ) + if next_waiting_job is not None: + return next_waiting_job + raise EmptyQueueError( + f"no job available (within the limit of {self.max_jobs_per_namespace} started jobs per namespace)" + ) + + def start_job(self) -> Tuple[str, str, Optional[str], Optional[str]]: + """Start the next job in the queue. + + The job is moved from the waiting state to the started state. + + Raises: + EmptyQueueError: if there is no job in the queue, within the limit of the maximum number of started jobs + for a namespace + + Returns: the job id and the input arguments: dataset, config and split + """ + next_waiting_job = self.get_next_waiting_job() + # ^ can raise EmptyQueueError @@ -262 +297 @@ class Queue: - f"job {job.to_id()} has a not the STARTED status ({job.status.value}). Force finishing anyway." + f"job {job.unicity_id} does not have the STARTED status ({job.status.value}). Force finishing anyway." @@ -265 +300 @@ class Queue: - logging.warning(f"job {job.to_id()} has a non-empty finished_at field. Force finishing anyway.") + logging.warning(f"job {job.unicity_id} has a non-empty finished_at field. Force finishing anyway.") @@ -267 +302 @@ class Queue: - logging.warning(f"job {job.to_id()} has an empty started_at field. Force finishing anyway.") + logging.warning(f"job {job.unicity_id} has an empty started_at field.
Force finishing anyway.") diff --git a/libs/libqueue/tests/test_queue.py b/libs/libqueue/tests/test_queue.py index e6dcc16a..4378402c 100644 --- a/libs/libqueue/tests/test_queue.py +++ b/libs/libqueue/tests/test_queue.py @@ -63,0 +64 @@ def test_priority_to_non_started_datasets() -> None: + queue.add_job(dataset="dataset1/dataset", config="config", split="split1") @@ -65 +65,0 @@ def test_priority_to_non_started_datasets() -> None: - queue.add_job(dataset="dataset1", config="config", split="split3") @@ -79 +79,4 @@ def test_priority_to_non_started_datasets() -> None: - assert dataset == "dataset1" + assert dataset == "dataset1/dataset" + assert split == "split1" + _, dataset, __, split = queue.start_job() + assert dataset == "dataset2" @@ -83,3 +85,0 @@ def test_priority_to_non_started_datasets() -> None: - assert split == "split3" - _, dataset, __, split = queue.start_job() - assert dataset == "dataset2" @@ -94,2 +94,2 @@ def test_priority_to_non_started_datasets() -> None: [email protected]("max_jobs_per_dataset", [(None), (-5), (0), (1), (2)]) -def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: [email protected]("max_jobs_per_namespace", [(None), (-5), (0), (1), (2)]) +def test_max_jobs_per_namespace(max_jobs_per_namespace: Optional[int]) -> None: @@ -99 +99 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - queue = Queue(test_type, max_jobs_per_dataset=max_jobs_per_dataset) + queue = Queue(test_type, max_jobs_per_namespace=max_jobs_per_namespace) @@ -109 +109 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - if max_jobs_per_dataset == 1: + if max_jobs_per_namespace == 1: @@ -115 +115 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - if max_jobs_per_dataset == 2: + if max_jobs_per_namespace == 2: @@ -119 +119 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - # max_jobs_per_dataset <= 0 and max_jobs_per_dataset == None are the same + # max_jobs_per_namespace <= 0 and max_jobs_per_namespace == None are the same diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 2b3e9067..54b409ef 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -342 +342 @@ name = "libqueue" -version = "0.4.6" +version = "0.4.7" @@ -358 +358 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" @@ -863 +863 @@ python-versions = "3.9.6" -content-hash = "575e7d4fe04e898046e3fbceb27363033bde5c70cfe735584bbde6f169d08132" +content-hash = "62ccd2d3d62a9c5f2bdafe5472d7a55439dbe5a295063fb50f85fb1de8ec6152" @@ -930 +930 @@ libqueue = [ - {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, + {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index b02e9955..f9293363 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 94800c0d..0eac8206 
100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -358 +358 @@ name = "libqueue" -version = "0.4.6" +version = "0.4.7" @@ -374 +374 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" @@ -905 +905 @@ python-versions = "3.9.6" -content-hash = "521a38f1d65874a2887d923d3f23c97b2b1f92dbe51470cace80334657c346f3" +content-hash = "50b8e3501d30bea0fbcc237aebe4db07cbfa1556ba9107880e6a811b4b4101f0" @@ -970 +970 @@ libqueue = [ - {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, + {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 7bdd65e3..cd6c434d 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } diff --git a/tools/docker-compose-datasets-server-from-local-code.yml b/tools/docker-compose-datasets-server-from-local-code.yml index d95ee70c..ac7272af 100644 --- a/tools/docker-compose-datasets-server-from-local-code.yml +++ b/tools/docker-compose-datasets-server-from-local-code.yml @@ -28 +28 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} @@ -62 +62 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} @@ -97 +97 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} @@ -125 +125 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} diff --git a/tools/docker-compose-datasets-server-from-remote-images.yml b/tools/docker-compose-datasets-server-from-remote-images.yml index 14340ac7..c180d315 100644 --- a/tools/docker-compose-datasets-server-from-remote-images.yml +++ b/tools/docker-compose-datasets-server-from-remote-images.yml @@ -26 +26 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} @@ -58 +58 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} @@ -91 +91 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} @@ -117 +117 @@ services: - QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_JOBS_PER_NAMESPACE: ${QUEUE_MAX_JOBS_PER_NAMESPACE-1} diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index b9008f32..e1a66e23 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -359 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", 
"fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -365 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -866 +866 @@ name = "libqueue" -version = "0.4.6" +version = "0.4.7" @@ -882 +882 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" @@ -2294 +2294 @@ python-versions = "3.9.6" -content-hash = "5530681c1178a328bc5f4c896e35ec8a6276cdc75e2fbb39e8e1ea481641f228" +content-hash = 
"296995eaabadcadd118c7fed822ba929ca4b36add4e423c9228e5b3147158c3d" @@ -2631 +2631 @@ libqueue = [ - {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, + {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 726f380c..562e8978 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/src/first_rows/utils.py b/workers/first_rows/src/first_rows/utils.py index fa89ffdb..a6bf597c 100644 --- a/workers/first_rows/src/first_rows/utils.py +++ b/workers/first_rows/src/first_rows/utils.py @@ -157,3 +157,3 @@ class Queues: - def __init__(self, max_jobs_per_dataset: Optional[int] = None): - self.splits = Queue(type=JobType.SPLITS.value, max_jobs_per_dataset=max_jobs_per_dataset) - self.first_rows = Queue(type=JobType.FIRST_ROWS.value, max_jobs_per_dataset=max_jobs_per_dataset) + def __init__(self, max_jobs_per_namespace: Optional[int] = None): + self.splits = Queue(type=JobType.SPLITS.value, max_jobs_per_namespace=max_jobs_per_namespace) + self.first_rows = Queue(type=JobType.FIRST_ROWS.value, max_jobs_per_namespace=max_jobs_per_namespace) diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/first_rows/src/first_rows/worker.py index aaafc1f3..7db7001c 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/first_rows/src/first_rows/worker.py @@ -29 +29 @@ class FirstRowsWorker(Worker): - self._queues = Queues(max_jobs_per_dataset=worker_config.queue.max_jobs_per_dataset) + self._queues = Queues(max_jobs_per_namespace=worker_config.queue.max_jobs_per_namespace) diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 77cea4f6..e1a66e23 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -866 +866 @@ name = "libqueue" -version = "0.4.6" +version = "0.4.7" @@ -882 +882 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl" @@ -2294 +2294 @@ python-versions = "3.9.6" -content-hash = "5530681c1178a328bc5f4c896e35ec8a6276cdc75e2fbb39e8e1ea481641f228" +content-hash = "296995eaabadcadd118c7fed822ba929ca4b36add4e423c9228e5b3147158c3d" @@ -2631 +2631 @@ libqueue = [ - {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, + {file = "libqueue-0.4.7-py3-none-any.whl", hash = "sha256:0c6e5ca692786d86228bcebef54344004d9edccc0f499accf27bdc4ed3277b9e"}, diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 79203d73..ec351e91 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.7-py3-none-any.whl", develop = false } diff --git a/workers/splits/src/splits/utils.py b/workers/splits/src/splits/utils.py index 
c0727b1e..85761750 100644 --- a/workers/splits/src/splits/utils.py +++ b/workers/splits/src/splits/utils.py @@ -70,3 +70,3 @@ class Queues: - def __init__(self, max_jobs_per_dataset: Optional[int] = None): - self.splits = Queue(type=JobType.SPLITS.value, max_jobs_per_dataset=max_jobs_per_dataset) - self.first_rows = Queue(type=JobType.FIRST_ROWS.value, max_jobs_per_dataset=max_jobs_per_dataset) + def __init__(self, max_jobs_per_namespace: Optional[int] = None): + self.splits = Queue(type=JobType.SPLITS.value, max_jobs_per_namespace=max_jobs_per_namespace) + self.first_rows = Queue(type=JobType.FIRST_ROWS.value, max_jobs_per_namespace=max_jobs_per_namespace) diff --git a/workers/splits/src/splits/worker.py b/workers/splits/src/splits/worker.py index 5b25412c..54f3d252 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/splits/src/splits/worker.py @@ -32 +32 @@ class SplitsWorker(Worker): - self._queues = Queues(max_jobs_per_dataset=worker_config.queue.max_jobs_per_dataset) + self._queues = Queues(max_jobs_per_namespace=worker_config.queue.max_jobs_per_namespace)
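Editor's note on the scheduling rule this commit introduces in `Queue.get_next_waiting_job`: the same idea can be shown without MongoDB. Below is a standalone sketch under the assumption that jobs are plain dicts, so it runs without a database; the `next_waiting_job` helper and its field names are illustrative, not the `libqueue` API:

```python
# Standalone sketch of namespace-aware scheduling, under the assumptions above.
# The namespace is the part of the dataset name before "/", or the whole name
# for "canonical" datasets, exactly as in `namespace=dataset.split("/")[0]`.
from collections import Counter
from typing import Optional


def namespace(dataset: str) -> str:
    return dataset.split("/")[0]


def next_waiting_job(
    waiting: list[dict], started: list[dict], max_jobs_per_namespace: Optional[int]
) -> Optional[dict]:
    started_counts = Counter(namespace(job["dataset"]) for job in started)
    started_unicity_ids = {job["unicity_id"] for job in started}
    candidates = [
        job
        for job in waiting
        # a given unicity_id can be started at most once at a time
        if job["unicity_id"] not in started_unicity_ids
        # namespaces that reached the cap are excluded entirely
        and (
            max_jobs_per_namespace is None
            or started_counts[namespace(job["dataset"])] < max_jobs_per_namespace
        )
    ]
    # prefer the namespace with the fewest started jobs, then the oldest job
    return min(
        candidates,
        key=lambda job: (started_counts[namespace(job["dataset"])], job["created_at"]),
        default=None,
    )


# Usage: with one started job for user1 and a cap of 1, the younger job for
# user2 is picked over the older job for user1.
waiting = [
    {"dataset": "user1/old", "unicity_id": "a", "created_at": 1},
    {"dataset": "user2/new", "unicity_id": "b", "created_at": 2},
]
started = [{"dataset": "user1/other", "unicity_id": "c", "created_at": 0}]
assert next_waiting_job(waiting, started, max_jobs_per_namespace=1)["unicity_id"] == "b"
```

This de-prioritizes users and organizations that already occupy workers, which is the fairness property the docstrings above describe.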
88a627da6ee47d9080b61946e620bdc44f448feb
Sylvain Lesage
2022-10-26T14:17:19
feat: 🎸 only sleep for 5 seconds (#625)
diff --git a/chart/values.yaml b/chart/values.yaml index a182b3e7..f025d025 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -52 +52 @@ queue: - sleepSeconds: 15 + sleepSeconds: 5
4ab71a111f38df2b7da11974c1c0f3fc44ec0f33
Sylvain Lesage
2022-10-26T14:08:49
Store and compare worker+dataset repo versions (#624)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 84257b33..c4eab53f 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5,2 +5,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-3513a2b", - "api": "huggingface/datasets-server-services-api:sha-9cc0bbe" + "admin": "huggingface/datasets-server-services-admin:sha-8801495", + "api": "huggingface/datasets-server-services-api:sha-c42fe9a" @@ -9,2 +9,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-a5134c4", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-2365545" + "splits": "huggingface/datasets-server-workers-splits:sha-8801495", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-8801495" diff --git a/libs/libcache/dist/libcache-0.3.2-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.2-py3-none-any.whl new file mode 100644 index 00000000..077036b5 Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.2-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.3.2.tar.gz b/libs/libcache/dist/libcache-0.3.2.tar.gz new file mode 100644 index 00000000..196ff086 Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.2.tar.gz differ diff --git a/libs/libcache/dist/libcache-0.3.3-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.3-py3-none-any.whl new file mode 100644 index 00000000..91702f7b Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.3-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.3.3.tar.gz b/libs/libcache/dist/libcache-0.3.3.tar.gz new file mode 100644 index 00000000..6896a00d Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.3.tar.gz differ diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml index 6e28c497..b6a984dd 100644 --- a/libs/libcache/pyproject.toml +++ b/libs/libcache/pyproject.toml @@ -5 +5 @@ name = "libcache" -version = "0.3.1" +version = "0.3.3" diff --git a/libs/libcache/src/libcache/simple_cache.py b/libs/libcache/src/libcache/simple_cache.py index 9a2e1c87..ff59845f 100644 --- a/libs/libcache/src/libcache/simple_cache.py +++ b/libs/libcache/src/libcache/simple_cache.py @@ -59,0 +60,2 @@ class SplitsResponse(Document): + worker_version = StringField(required=False) + dataset_git_revision = StringField(required=False) @@ -86,0 +89,2 @@ class FirstRowsResponse(Document): + worker_version = StringField(required=False) + dataset_git_revision = StringField(required=False) @@ -113,0 +118,2 @@ def upsert_splits_response( + worker_version: Optional[str] = None, + dataset_git_revision: Optional[str] = None, @@ -121,0 +128,2 @@ def upsert_splits_response( + worker_version=worker_version, + dataset_git_revision=dataset_git_revision, @@ -132,0 +141,8 @@ def mark_splits_responses_as_stale(dataset_name: str): +class SplitsCacheEntry(TypedDict): + response: Dict + http_status: HTTPStatus + error_code: Optional[str] + worker_version: Optional[str] + dataset_git_revision: Optional[str] + + @@ -134 +150 @@ def mark_splits_responses_as_stale(dataset_name: str): -def get_splits_response(dataset_name: str) -> Tuple[Dict, HTTPStatus, Optional[str]]: +def get_splits_response(dataset_name: str) -> SplitsCacheEntry: @@ -136 +152,7 @@ def get_splits_response(dataset_name: str) -> Tuple[Dict, HTTPStatus, Optional[s - return split_response.response, split_response.http_status, split_response.error_code + return { + "response": split_response.response, + "http_status": split_response.http_status, + "error_code": split_response.error_code, + "worker_version": 
split_response.worker_version, + "dataset_git_revision": split_response.dataset_git_revision, + } @@ -148,0 +171,2 @@ def upsert_first_rows_response( + worker_version: Optional[str] = None, + dataset_git_revision: Optional[str] = None, @@ -156,0 +181,2 @@ def upsert_first_rows_response( + worker_version=worker_version, + dataset_git_revision=dataset_git_revision, @@ -181,0 +208,9 @@ def mark_first_rows_responses_as_stale( +# Note: it's the same definition as SplitsCacheEntry +class FirstRowsCacheEntry(TypedDict): + response: Dict + http_status: HTTPStatus + error_code: Optional[str] + worker_version: Optional[str] + dataset_git_revision: Optional[str] + + @@ -183,3 +218 @@ def mark_first_rows_responses_as_stale( -def get_first_rows_response( - dataset_name: str, config_name: str, split_name: str -) -> Tuple[Dict, HTTPStatus, Optional[str]]: +def get_first_rows_response(dataset_name: str, config_name: str, split_name: str) -> FirstRowsCacheEntry: @@ -189 +222,7 @@ def get_first_rows_response( - return first_rows_response.response, first_rows_response.http_status, first_rows_response.error_code + return { + "response": first_rows_response.response, + "http_status": first_rows_response.http_status, + "error_code": first_rows_response.error_code, + "worker_version": first_rows_response.worker_version, + "dataset_git_revision": first_rows_response.dataset_git_revision, + } @@ -276,0 +316,2 @@ class SplitsResponseReport(TypedDict): + worker_version: Optional[str] + dataset_git_revision: Optional[str] @@ -333 +374,5 @@ def get_cache_reports_splits(cursor: str, limit: int) -> CacheReportSplits: - objects = list(queryset.order_by("+id").only("id", "dataset_name", "http_status", "error_code").limit(limit)) + objects = list( + queryset.order_by("+id") + .only("id", "dataset_name", "http_status", "error_code", "worker_version", "dataset_git_revision") + .limit(limit) + ) @@ -340,0 +386,2 @@ def get_cache_reports_splits(cursor: str, limit: int) -> CacheReportSplits: + "worker_version": object.worker_version, + "dataset_git_revision": object.dataset_git_revision, @@ -381 +428,10 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo - .only("id", "dataset_name", "config_name", "split_name", "http_status", "error_code") + .only( + "id", + "dataset_name", + "config_name", + "split_name", + "http_status", + "error_code", + "worker_version", + "dataset_git_revision", + ) @@ -391,0 +448,2 @@ def get_cache_reports_first_rows(cursor: Optional[str], limit: int) -> CacheRepo + "worker_version": object.worker_version, + "dataset_git_revision": object.dataset_git_revision, diff --git a/libs/libcache/tests/test_simple_cache.py b/libs/libcache/tests/test_simple_cache.py index 69abc590..20aa09cd 100644 --- a/libs/libcache/tests/test_simple_cache.py +++ b/libs/libcache/tests/test_simple_cache.py @@ -43,4 +43,6 @@ def test_upsert_splits_response() -> None: - response1, http_status, error_code = get_splits_response(dataset_name) - assert http_status == HTTPStatus.OK - assert response1 == response - assert error_code is None + cache_entry = get_splits_response(dataset_name) + assert cache_entry["http_status"] == HTTPStatus.OK + assert cache_entry["response"] == response + assert cache_entry["error_code"] is None + assert cache_entry["worker_version"] is None + assert cache_entry["dataset_git_revision"] is None @@ -50,2 +52,2 @@ def test_upsert_splits_response() -> None: - (response2, _, _) = get_splits_response(dataset_name) - assert response2 == response1 + cache_entry2 = 
get_splits_response(dataset_name) + assert cache_entry2 == cache_entry @@ -65,5 +67,17 @@ def test_upsert_splits_response() -> None: - upsert_splits_response(dataset_name, response, HTTPStatus.BAD_REQUEST, "error_code") - response3, http_status, error_code = get_splits_response(dataset_name) - assert response3 == response - assert http_status == HTTPStatus.BAD_REQUEST - assert error_code == "error_code" + error_code = "error_code" + worker_version = "0.1.2" + dataset_git_revision = "123456" + upsert_splits_response( + dataset_name, + response, + HTTPStatus.BAD_REQUEST, + error_code=error_code, + worker_version=worker_version, + dataset_git_revision=dataset_git_revision, + ) + cache_entry3 = get_splits_response(dataset_name) + assert cache_entry3["http_status"] == HTTPStatus.BAD_REQUEST + assert cache_entry3["response"] == response + assert cache_entry3["error_code"] == error_code + assert cache_entry3["worker_version"] == worker_version + assert cache_entry3["dataset_git_revision"] == dataset_git_revision @@ -78,3 +92,6 @@ def test_upsert_first_rows_response() -> None: - response1, http_status, _ = get_first_rows_response(dataset_name, config_name, split_name) - assert http_status == HTTPStatus.OK - assert response1 == response + cache_entry = get_first_rows_response(dataset_name, config_name, split_name) + assert cache_entry["http_status"] == HTTPStatus.OK + assert cache_entry["response"] == response + assert cache_entry["error_code"] is None + assert cache_entry["worker_version"] is None + assert cache_entry["dataset_git_revision"] is None @@ -84,2 +101,2 @@ def test_upsert_first_rows_response() -> None: - (response2, _, _) = get_first_rows_response(dataset_name, config_name, split_name) - assert response2 == response1 + cache_entry2 = get_first_rows_response(dataset_name, config_name, split_name) + assert cache_entry2 == cache_entry @@ -104,0 +122,20 @@ def test_upsert_first_rows_response() -> None: + error_code = "error_code" + worker_version = "0.1.2" + dataset_git_revision = "123456" + upsert_first_rows_response( + dataset_name, + config_name, + split_name, + response, + HTTPStatus.BAD_REQUEST, + error_code=error_code, + worker_version=worker_version, + dataset_git_revision=dataset_git_revision, + ) + cache_entry3 = get_first_rows_response(dataset_name, config_name, split_name) + assert cache_entry3["http_status"] == HTTPStatus.BAD_REQUEST + assert cache_entry3["response"] == response + assert cache_entry3["error_code"] == error_code + assert cache_entry3["worker_version"] == worker_version + assert cache_entry3["dataset_git_revision"] == dataset_git_revision + @@ -258,0 +296,2 @@ def test_get_cache_reports_splits() -> None: + worker_version = "0.1.2" + dataset_git_revision = "123456" @@ -263,2 +302,4 @@ def test_get_cache_reports_splits() -> None: - "ErrorCodeB", - b_details, + error_code="ErrorCodeB", + details=b_details, + worker_version=worker_version, + dataset_git_revision=dataset_git_revision, @@ -283 +324,7 @@ def test_get_cache_reports_splits() -> None: - {"dataset": "a", "http_status": HTTPStatus.OK.value, "error_code": None}, + { + "dataset": "a", + "http_status": HTTPStatus.OK.value, + "error_code": None, + "worker_version": None, + "dataset_git_revision": None, + }, @@ -287,0 +335,2 @@ def test_get_cache_reports_splits() -> None: + "worker_version": "0.1.2", + "dataset_git_revision": "123456", @@ -299,0 +349,2 @@ def test_get_cache_reports_splits() -> None: + "worker_version": None, + "dataset_git_revision": None, @@ -327,0 +379,2 @@ def 
test_get_cache_reports_first_rows() -> None: + worker_version = "0.1.2" + dataset_git_revision = "123456" @@ -334,2 +387,4 @@ def test_get_cache_reports_first_rows() -> None: - "ErrorCodeB", - b_details, + error_code="ErrorCodeB", + details=b_details, + worker_version=worker_version, + dataset_git_revision=dataset_git_revision, @@ -356 +411,9 @@ def test_get_cache_reports_first_rows() -> None: - {"dataset": "a", "config": "config", "split": "split", "http_status": HTTPStatus.OK.value, "error_code": None}, + { + "dataset": "a", + "config": "config", + "split": "split", + "http_status": HTTPStatus.OK.value, + "error_code": None, + "worker_version": None, + "dataset_git_revision": None, + }, @@ -362,0 +426,2 @@ def test_get_cache_reports_first_rows() -> None: + "worker_version": "0.1.2", + "dataset_git_revision": "123456", @@ -376,0 +442,2 @@ def test_get_cache_reports_first_rows() -> None: + "worker_version": None, + "dataset_git_revision": None, diff --git a/libs/libqueue/dist/libqueue-0.4.3-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.3-py3-none-any.whl new file mode 100644 index 00000000..c182b934 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.3-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.3.tar.gz b/libs/libqueue/dist/libqueue-0.4.3.tar.gz new file mode 100644 index 00000000..a4ad1859 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.3.tar.gz differ diff --git a/libs/libqueue/dist/libqueue-0.4.4-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.4-py3-none-any.whl new file mode 100644 index 00000000..9604b84f Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.4-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.4.tar.gz b/libs/libqueue/dist/libqueue-0.4.4.tar.gz new file mode 100644 index 00000000..4ceaf592 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.4.tar.gz differ diff --git a/libs/libqueue/dist/libqueue-0.4.5-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.5-py3-none-any.whl new file mode 100644 index 00000000..3366aa31 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.5-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.5.tar.gz b/libs/libqueue/dist/libqueue-0.4.5.tar.gz new file mode 100644 index 00000000..9c35a07e Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.5.tar.gz differ diff --git a/libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl new file mode 100644 index 00000000..98fe99d9 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.6.tar.gz b/libs/libqueue/dist/libqueue-0.4.6.tar.gz new file mode 100644 index 00000000..b8660a45 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.6.tar.gz differ diff --git a/libs/libqueue/poetry.lock b/libs/libqueue/poetry.lock index 20f3b444..007dc1d4 100644 --- a/libs/libqueue/poetry.lock +++ b/libs/libqueue/poetry.lock @@ -620 +620 @@ python-versions = "3.9.6" -content-hash = "9e0fbfb54d61767cd7d6c92f871004acf033be695b75f6c262e9c758ab094c82" +content-hash = "6fd9055ec2fc0f191dd3fb2848cc44b721eddd0caccb377ddc6a73274a06a214" diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index 993063a6..183d31de 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -5 +5 @@ name = "libqueue" -version = "0.4.2" +version = "0.4.6" @@ -11,0 +12 @@ mongoengine = "^0.24.1" +packaging = "^21.3" diff --git 
a/libs/libqueue/src/libqueue/queue.py b/libs/libqueue/src/libqueue/queue.py index 57d6ef75..a9157d94 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libqueue/src/libqueue/queue.py @@ -8 +8 @@ from datetime import datetime, timezone -from typing import Generic, List, Optional, Tuple, Type, TypedDict, TypeVar +from typing import Generic, List, Literal, Optional, Tuple, Type, TypedDict, TypeVar @@ -11 +10,0 @@ from mongoengine import Document, DoesNotExist, connect -from mongoengine.errors import MultipleObjectsReturned @@ -40,0 +40 @@ class Status(enum.Enum): + SKIPPED = "skipped" @@ -59,0 +60 @@ class CountByStatus(TypedDict): + skipped: int @@ -83,3 +84,2 @@ def connect_to_database(database: str, host: str) -> None: -# - cancelled: cancelled_at is not None: cancelled jobs -# For a given set of arguments, any number of finished and cancelled jobs are allowed, -# but only 0 or 1 job for the set of the other states +# For a given set of arguments, only one job is allowed in the started state. No +# restriction for the other states @@ -146,2 +146,2 @@ class Queue: - - a job can be in the queue only once in a pending state (waiting or started) - - a job can be in the queue multiple times in a finished state (success, error, cancelled) + - a job can be in the queue only once in the "started" state + - a job can be in the queue multiple times in the other states (waiting, success, error, cancelled, skipped) @@ -149 +149 @@ class Queue: - - datasets that already have started job are de-prioritized + - datasets that already have started jobs are de-prioritized @@ -167,3 +166,0 @@ class Queue: - If a job with the same arguments already exists in the queue in a pending state (waiting, started), no new job - is created and the existing job is returned. - @@ -172,2 +169 @@ class Queue: - existing_jobs = Job.objects(type=self.type, dataset=dataset, config=config, split=split) - new_job = Job( + return Job( @@ -180,14 +176 @@ class Queue: - ) - pending_jobs = existing_jobs.filter(status__in=[Status.WAITING, Status.STARTED]) - try: - # If one non-finished job exists, return it - return pending_jobs.get() - except DoesNotExist: - # None exist, create one - return new_job.save() - except MultipleObjectsReturned: - # should not happen, but it's not enforced in the database - # (we could have one in WAITING status and another one in STARTED status) - # if it happens, we "cancel" all of them, and re-run the same function - pending_jobs.update(finished_at=get_datetime(), status=Status.CANCELLED) - return self.add_job(dataset=dataset, config=config, split=split) + ).save() @@ -200 +183,2 @@ class Queue: - but not more than `max_jobs_per_dataset` jobs per dataset. + but not more than `max_jobs_per_dataset` jobs per dataset, and not with the same set of arguments + (dataset, config, split). 
@@ -211 +195,5 @@ class Queue: - started_datasets = [job.dataset for job in Job.objects(type=self.type, status=Status.STARTED).only("dataset")] + started_job_arguments = { + (job.dataset, job.config, job.split) + for job in Job.objects(type=self.type, status=Status.STARTED).only("dataset", "config", "split") + } + started_datasets = [job_arguments[0] for job_arguments in started_job_arguments] @@ -221,0 +210 @@ class Queue: + # and without the same arguments (dataset, config, split) @@ -233 +222,2 @@ class Queue: - next_waiting_job = ( + # probably sub-optimal: ideally we should not loop here, neither create a list of all the waiting jobs + waiting_jobs = list( @@ -237,5 +226,0 @@ class Queue: - .first() - ) - if next_waiting_job is None: - raise EmptyQueueError( - "no job available (within the limit of {max_jobs_per_dataset} started jobs per dataset)" @@ -242,0 +228,12 @@ class Queue: + while waiting_jobs: + next_waiting_job = waiting_jobs.pop(0) + if ( + next_waiting_job.dataset, + next_waiting_job.config, + next_waiting_job.split, + ) not in started_job_arguments: + break + else: + raise EmptyQueueError( + "no job available (within the limit of {max_jobs_per_dataset} started jobs per dataset)" + ) @@ -247 +244 @@ class Queue: - def finish_job(self, job_id: str, success: bool) -> None: + def finish_job(self, job_id: str, finished_status: Literal[Status.SUCCESS, Status.ERROR, Status.SKIPPED]) -> None: @@ -271,2 +268 @@ class Queue: - status = Status.SUCCESS if success else Status.ERROR - job.update(finished_at=get_datetime(), status=status) + job.update(finished_at=get_datetime(), status=finished_status) @@ -328,0 +325 @@ class Queue: + "skipped": self.count_jobs(status=Status.SKIPPED), diff --git a/libs/libqueue/src/libqueue/worker.py b/libs/libqueue/src/libqueue/worker.py index fe583dcc..0c9c3d0c 100644 --- a/libs/libqueue/src/libqueue/worker.py +++ b/libs/libqueue/src/libqueue/worker.py @@ -8 +8 @@ from abc import ABC, abstractmethod -from typing import Optional +from typing import Literal, Optional @@ -9,0 +10 @@ from typing import Optional +from packaging import version @@ -13 +14,8 @@ from libqueue.config import QueueConfig -from libqueue.queue import EmptyQueueError, Queue +from libqueue.queue import EmptyQueueError, Queue, Status + + +def parse_version(string_version: str) -> version.Version: + parsed_version = version.parse(string_version) + if isinstance(parsed_version, version.LegacyVersion): + raise ValueError(f"LegacyVersion is not supported: {parsed_version}") + return parsed_version @@ -17,0 +26 @@ class Worker(ABC): + version: str @@ -24 +33 @@ class Worker(ABC): - def __init__(self, queue_config: QueueConfig) -> None: + def __init__(self, queue_config: QueueConfig, version: str) -> None: @@ -25,0 +35 @@ class Worker(ABC): + self.version = version @@ -79,0 +90 @@ class Worker(ABC): + finished_status: Literal[Status.SUCCESS, Status.ERROR, Status.SKIPPED] @@ -82,4 +93,10 @@ class Worker(ABC): - success = self.compute( - dataset=dataset, - config=config, - split=split, + finished_status = ( + Status.SKIPPED + if self.should_skip_job(dataset=dataset, config=config, split=split) + else Status.SUCCESS + if self.compute( + dataset=dataset, + config=config, + split=split, + ) + else Status.ERROR @@ -86,0 +104,3 @@ class Worker(ABC): + except Exception: + logging.exception(f"error while computing {parameters_for_log}") + finished_status = Status.ERROR @@ -88,3 +108,2 @@ class Worker(ABC): - self.queue.finish_job(job_id=job_id, success=success) - result = "success" if success else 
"error" - logging.debug(f"job finished with {result}: {job_id} for {parameters_for_log}") + self.queue.finish_job(job_id=job_id, finished_status=finished_status) + logging.debug(f"job finished with {finished_status.value}: {job_id} for {parameters_for_log}") @@ -92,0 +112,27 @@ class Worker(ABC): + def compare_major_version(self, other_version: str) -> int: + """ + Compare the major version of worker's self version and the other version's. + + Args: + other_version (:obj:`str`): the other semantic version + + Returns: + :obj:`int`: the difference between the major version of both versions. + 0 if they are equal. Negative if worker's major version is lower than other_version, positive otherwise. + Raises: + :obj:`ValueError`: if worker's version or other_version is not a valid semantic version. + """ + try: + return parse_version(self.version).major - parse_version(other_version).major + except Exception as err: + raise RuntimeError(f"Could not get major versions: {err}") from err + + @abstractmethod + def should_skip_job( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + ) -> bool: + pass + diff --git a/libs/libqueue/tests/test_queue.py b/libs/libqueue/tests/test_queue.py index c15f3a29..e6dcc16a 100644 --- a/libs/libqueue/tests/test_queue.py +++ b/libs/libqueue/tests/test_queue.py @@ -8,8 +8 @@ import pytest -from libqueue.queue import ( - EmptyQueueError, - Job, - Queue, - Status, - _clean_queue_database, - get_datetime, -) +from libqueue.queue import EmptyQueueError, Queue, Status, _clean_queue_database @@ -30 +23 @@ def test_add_job() -> None: - # a second call is ignored + # a second call adds a second waiting job @@ -39 +32,2 @@ def test_add_job() -> None: - # adding the job while the first one has not finished yet is ignored + # adding the job while the first one has not finished yet adds another waiting job + # (there are no limits to the number of waiting jobs) @@ -42 +36 @@ def test_add_job() -> None: - # thus: no new job available + # but: it's not possible to start two jobs with the same arguments @@ -45 +39,13 @@ def test_add_job() -> None: - queue.finish_job(job_id=job_id, success=True) + queue.finish_job(job_id=job_id, finished_status=Status.SUCCESS) + # the queue is not empty + assert queue.is_job_in_process(dataset=test_dataset) is True + # process the second job + job_id, *_ = queue.start_job() + queue.finish_job(job_id=job_id, finished_status=Status.SUCCESS) + # and the third one + job_id, *_ = queue.start_job() + other_job_id = ("1" if job_id[0] == "0" else "0") + job_id[1:] + # trying to finish another job fails silently (with a log) + queue.finish_job(job_id=other_job_id, finished_status=Status.SUCCESS) + # finish it + queue.finish_job(job_id=job_id, finished_status=Status.SUCCESS) @@ -48,0 +55 @@ def test_add_job() -> None: + # an error is raised if we try to start a job @@ -50,47 +56,0 @@ def test_add_job() -> None: - # add a job again - queue.add_job(dataset=test_dataset) - # start it - job_id, *_ = queue.start_job() - other_job_id = ("1" if job_id[0] == "0" else "0") + job_id[1:] - queue.finish_job(job_id=other_job_id, success=True) - # ^ fails silently (with a log) - queue.finish_job(job_id=job_id, success=True) - - -def test_add_job_with_broken_collection() -> None: - test_type = "test_type" - test_dataset = "dataset_broken" - test_config = "config_broken" - test_split = "split_broken" - # ensure the jobs are cancelled with more than one exist in a "pending" status - # we "manually" create two jobs in a "pending" status for 
the same split - # (we normally cannot do that with the exposed methods) - job_1 = Job( - type=test_type, - dataset=test_dataset, - config=test_config, - split=test_split, - created_at=get_datetime(), - status=Status.WAITING, - ).save() - job_2 = Job( - type=test_type, - dataset=test_dataset, - config=test_config, - split=test_split, - created_at=get_datetime(), - started_at=get_datetime(), - status=Status.STARTED, - ).save() - # then we add a job: it should create a new job in the "WAITING" status - # and the two other jobs should be cancelled - queue = Queue(test_type) - queue.add_job(dataset=test_dataset, config=test_config, split=test_split) - assert ( - Job.objects( - type=test_type, dataset=test_dataset, config=test_config, split=test_split, status__in=[Status.WAITING] - ).count() - == 1 - ) - assert Job.objects(pk=job_1.pk).get().status == Status.CANCELLED - assert Job.objects(pk=job_2.pk).get().status == Status.CANCELLED @@ -102,0 +63 @@ def test_priority_to_non_started_datasets() -> None: + queue.add_job(dataset="dataset1", config="config", split="split1") @@ -126,0 +88,3 @@ def test_priority_to_non_started_datasets() -> None: + # raises even if there is still a waiting job + # (dataset="dataset1", config="config", split="split1") + # because a job with the same arguments is already started @@ -146 +109,0 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - @@ -158 +121 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - queue.finish_job(job_id, success=True) + queue.finish_job(job_id, finished_status=Status.SUCCESS) @@ -169,2 +132,2 @@ def test_count_by_status() -> None: - expected_empty = {"waiting": 0, "started": 0, "success": 0, "error": 0, "cancelled": 0} - expected_one_waiting = {"waiting": 1, "started": 0, "success": 0, "error": 0, "cancelled": 0} + expected_empty = {"waiting": 0, "started": 0, "success": 0, "error": 0, "cancelled": 0, "skipped": 0} + expected_one_waiting = {"waiting": 1, "started": 0, "success": 0, "error": 0, "cancelled": 0, "skipped": 0} diff --git a/libs/libqueue/tests/test_worker.py b/libs/libqueue/tests/test_worker.py new file mode 100644 index 00000000..ebb53db7 --- /dev/null +++ b/libs/libqueue/tests/test_worker.py @@ -0,0 +1,40 @@ +import pytest + +from libqueue.worker import parse_version + +from .utils import DummyWorker + + [email protected]( + "string_version, expected_major_version, should_raise", + [ + ("1.0.0", 1, False), + ("3.1.2", 3, False), + ("1.1", 1, False), + ("not a version", None, True), + ], +) +def test_parse_version(string_version: str, expected_major_version: int, should_raise: bool) -> None: + if should_raise: + with pytest.raises(Exception): + parse_version(string_version) + else: + assert parse_version(string_version).major == expected_major_version + + [email protected]( + "worker_version, other_version, expected, should_raise", + [ + ("1.0.0", "1.0.1", 0, False), + ("1.0.0", "2.0.1", -1, False), + ("2.0.0", "1.0.1", 1, False), + ("not a version", "1.0.1", None, True), + ], +) +def test_compare_major_version(worker_version: str, other_version: str, expected: int, should_raise: bool) -> None: + worker = DummyWorker(version=worker_version) + if should_raise: + with pytest.raises(Exception): + worker.compare_major_version(other_version) + else: + assert worker.compare_major_version(other_version) == expected diff --git a/libs/libqueue/tests/utils.py b/libs/libqueue/tests/utils.py new file mode 100644 index 00000000..dc79e080 --- /dev/null +++ 
b/libs/libqueue/tests/utils.py @@ -0,0 +1,25 @@ +from typing import Optional + +from libqueue.config import QueueConfig +from libqueue.queue import Queue +from libqueue.worker import Worker + + +class DummyWorker(Worker): + def __init__(self, version: str): + super().__init__(queue_config=QueueConfig(), version=version) + + @property + def queue(self) -> Queue: + return Queue("queue_type") + + def should_skip_job(self, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> bool: + return super().should_skip_job(dataset, config, split) + + def compute( + self, + dataset: str, + config: Optional[str] = None, + split: Optional[str] = None, + ) -> bool: + return super().compute(dataset, config, split) diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index 56bc11f1..2b3e9067 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -307 +307 @@ name = "libcache" -version = "0.3.1" +version = "0.3.3" @@ -322 +322 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" @@ -342 +342 @@ name = "libqueue" -version = "0.4.2" +version = "0.4.6" @@ -351,0 +352 @@ mongoengine = ">=0.24.1,<0.25.0" +packaging = ">=21.3,<22.0" @@ -357 +358 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" @@ -862 +863 @@ python-versions = "3.9.6" -content-hash = "3b9dd1abc61cff1ada376c6fd38e78f0a500f63181966e98e0c7a98994573459" +content-hash = "575e7d4fe04e898046e3fbceb27363033bde5c70cfe735584bbde6f169d08132" @@ -923 +924 @@ libcache = [ - {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, + {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, @@ -929 +930 @@ libqueue = [ - {file = "libqueue-0.4.2-py3-none-any.whl", hash = "sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, + {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 9d40d57d..b02e9955 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -11 +11 @@ huggingface-hub = "^0.8.1" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 3968b09f..94800c0d 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -323 +323 @@ name = "libcache" -version = "0.3.1" +version = "0.3.3" @@ -338 +338 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" @@ -358 +358 @@ name = "libqueue" -version = "0.4.2" +version = "0.4.6" @@ -367,0 +368 @@ mongoengine = ">=0.24.1,<0.25.0" +packaging = ">=21.3,<22.0" @@ -373 +374 @@ type = "file" -url = 
"../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" @@ -904 +905 @@ python-versions = "3.9.6" -content-hash = "8ef5a2fce9713c54622d086aafcc10d434fc8ec13430d22a715845c4206b8f19" +content-hash = "521a38f1d65874a2887d923d3f23c97b2b1f92dbe51470cace80334657c346f3" @@ -963 +964 @@ libcache = [ - {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, + {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, @@ -969 +970 @@ libqueue = [ - {file = "libqueue-0.4.2-py3-none-any.whl", hash = "sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, + {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index a8156b8d..7bdd65e3 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -11 +11 @@ jsonschema = "^4.16.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } @@ -13 +13 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } diff --git a/services/api/src/api/dataset.py b/services/api/src/api/dataset.py index 3c961670..02ef31bb 100644 --- a/services/api/src/api/dataset.py +++ b/services/api/src/api/dataset.py @@ -88,2 +88,2 @@ def is_first_rows_in_process( - response, http_status, _ = get_splits_response(dataset) - if http_status == HTTPStatus.OK and any( + result = get_splits_response(dataset) + if result["http_status"] == HTTPStatus.OK and any( @@ -91 +91 @@ def is_first_rows_in_process( - for split_item in response["splits"] + for split_item in result["response"]["splits"] diff --git a/services/api/src/api/routes/first_rows.py b/services/api/src/api/routes/first_rows.py index 1a5b693d..cd9abaef 100644 --- a/services/api/src/api/routes/first_rows.py +++ b/services/api/src/api/routes/first_rows.py @@ -47 +47,4 @@ def create_first_rows_endpoint( - response, http_status, error_code = get_first_rows_response(dataset, config, split) + result = get_first_rows_response(dataset, config, split) + response = result["response"] + http_status = result["http_status"] + error_code = result["error_code"] diff --git a/services/api/src/api/routes/splits.py b/services/api/src/api/routes/splits.py index 0a441bac..0d40b69d 100644 --- a/services/api/src/api/routes/splits.py +++ b/services/api/src/api/routes/splits.py @@ -45 +45,4 @@ def create_splits_endpoint( - response, http_status, error_code = get_splits_response(dataset) + result = get_splits_response(dataset) + response = result["response"] + http_status = result["http_status"] + error_code = result["error_code"] diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 6bba12c8..b9008f32 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -359 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", 
"aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -365 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -786,0 +787 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" + @@ -822 +823 @@ name = "libcache" -version = "0.3.1" +version = "0.3.3" @@ -837 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" +url = 
"../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" @@ -865 +866 @@ name = "libqueue" -version = "0.4.2" +version = "0.4.6" @@ -874,0 +876 @@ mongoengine = ">=0.24.1,<0.25.0" +packaging = ">=21.3,<22.0" @@ -880 +882 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" @@ -2292 +2294 @@ python-versions = "3.9.6" -content-hash = "cf5836d4de1661adb7edad80e268a276fafad82aaa7ba5e0c6bbe70d9c2f5a49" +content-hash = "5530681c1178a328bc5f4c896e35ec8a6276cdc75e2fbb39e8e1ea481641f228" @@ -2622 +2624 @@ libcache = [ - {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, + {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, @@ -2629 +2631 @@ libqueue = [ - {file = "libqueue-0.4.2-py3-none-any.whl", hash = "sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, + {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 2541e20e..726f380c 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -19 +19 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/src/first_rows/response.py b/workers/first_rows/src/first_rows/response.py index 4f90c926..25c9983c 100644 --- a/workers/first_rows/src/first_rows/response.py +++ b/workers/first_rows/src/first_rows/response.py @@ -59,0 +60,5 @@ class FirstRowsResponse(TypedDict): +class FirstRowsResponseResult(TypedDict): + first_rows_response: FirstRowsResponse + dataset_git_revision: Optional[str] + + @@ -253 +258,33 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, -def get_first_rows_response( +def get_dataset_git_revision( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, +) -> Union[str, None]: + """ + Get the git revision of the dataset. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `Union[str, None]`: the dataset git revision (sha) if any. + <Tip> + Raises the following errors: + - [`~worker.exceptions.DatasetNotFoundError`] + If the repository to download from cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. 
+ </Tip> + """ + use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False + try: + dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) + except RepositoryNotFoundError as err: + raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + return dataset_info.sha + + +def compute_first_rows_response( @@ -266 +303 @@ def get_first_rows_response( -) -> FirstRowsResponse: +) -> FirstRowsResponseResult: @@ -318,4 +355,2 @@ def get_first_rows_response( - try: - HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) - except RepositoryNotFoundError as err: - raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + # try to get the dataset config info. It raises if the dataset does not exist or is private + dataset_git_revision = get_dataset_git_revision(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) @@ -428,5 +463,8 @@ def get_first_rows_response( - "dataset": dataset, - "config": config, - "split": split, - "features": to_features_list(dataset, config, split, features), - "rows": row_items, + "first_rows_response": { + "dataset": dataset, + "config": config, + "split": split, + "features": to_features_list(dataset, config, split, features), + "rows": row_items, + }, + "dataset_git_revision": dataset_git_revision, diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/first_rows/src/first_rows/worker.py index ec35b4e2..aaafc1f3 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/first_rows/src/first_rows/worker.py @@ -3,0 +4 @@ +import importlib.metadata @@ -8 +9 @@ from typing import Optional -from libcache.simple_cache import upsert_first_rows_response +from libcache.simple_cache import get_first_rows_response, upsert_first_rows_response @@ -12 +13 @@ from first_rows.config import WorkerConfig -from first_rows.response import get_first_rows_response +from first_rows.response import compute_first_rows_response, get_dataset_git_revision @@ -27 +28 @@ class FirstRowsWorker(Worker): - super().__init__(queue_config=worker_config.queue) + super().__init__(queue_config=worker_config.queue, version=importlib.metadata.version(__package__)) @@ -34,0 +36,34 @@ class FirstRowsWorker(Worker): + def should_skip_job(self, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> bool: + """Return True if the job should be skipped, False otherwise. + + The job must be skipped if: + - a cache entry exists for the dataset + - and the result was successful + - and it has been created with the same major version of the worker + - and it has been created with the exact same git commit of the dataset repository + + Args: + dataset (:obj:`str`): The name of the dataset. + config (:obj:`str`, `optional`): The name of the configuration. + split (:obj:`str`, `optional`): The name of the split. + + Returns: + :obj:`bool`: True if the job should be skipped, False otherwise. 
+ """ + if config is None or split is None: + return False + try: + cache_entry = get_first_rows_response(dataset_name=dataset, config_name=config, split_name=split) + dataset_git_revision = get_dataset_git_revision( + dataset=dataset, hf_endpoint=self.config.common.hf_endpoint, hf_token=self.config.common.hf_token + ) + return ( + cache_entry["http_status"] == HTTPStatus.OK + and cache_entry["worker_version"] is not None + and self.compare_major_version(cache_entry["worker_version"]) == 0 + and cache_entry["dataset_git_revision"] is not None + and cache_entry["dataset_git_revision"] == dataset_git_revision + ) + except Exception: + return False + @@ -44 +79 @@ class FirstRowsWorker(Worker): - response = get_first_rows_response( + result = compute_first_rows_response( @@ -62 +97 @@ class FirstRowsWorker(Worker): - response=dict(response), + response=dict(result["first_rows_response"]), @@ -63,0 +99,2 @@ class FirstRowsWorker(Worker): + worker_version=self.version, + dataset_git_revision=result["dataset_git_revision"], diff --git a/workers/first_rows/tests/fixtures/hub.py b/workers/first_rows/tests/fixtures/hub.py index 8a40c2a7..a2562d01 100644 --- a/workers/first_rows/tests/fixtures/hub.py +++ b/workers/first_rows/tests/fixtures/hub.py @@ -257 +257 @@ HubDatasets = Dict[str, HubDatasetTest] -def get_splits_response(dataset: str, num_bytes: float = None, num_examples: int = None): +def create_splits_response(dataset: str, num_bytes: float = None, num_examples: int = None): @@ -272 +272 @@ def get_splits_response(dataset: str, num_bytes: float = None, num_examples: int -def get_first_rows_response(dataset: str, cols: Dict[str, Any], rows: List[Any]): +def create_first_rows_response(dataset: str, cols: Dict[str, Any], rows: List[Any]): @@ -415,2 +415,2 @@ def hub_datasets( - "splits_response": get_splits_response(hub_public_csv, None, None), - "first_rows_response": get_first_rows_response(hub_public_csv, DATA_cols, DATA_rows), + "splits_response": create_splits_response(hub_public_csv, None, None), + "first_rows_response": create_first_rows_response(hub_public_csv, DATA_cols, DATA_rows), @@ -420,2 +420,2 @@ def hub_datasets( - "splits_response": get_splits_response(hub_private_csv, None, None), - "first_rows_response": get_first_rows_response(hub_private_csv, DATA_cols, DATA_rows), + "splits_response": create_splits_response(hub_private_csv, None, None), + "first_rows_response": create_first_rows_response(hub_private_csv, DATA_cols, DATA_rows), @@ -425,2 +425,2 @@ def hub_datasets( - "splits_response": get_splits_response(hub_gated_csv, None, None), - "first_rows_response": get_first_rows_response(hub_gated_csv, DATA_cols, DATA_rows), + "splits_response": create_splits_response(hub_gated_csv, None, None), + "first_rows_response": create_first_rows_response(hub_gated_csv, DATA_cols, DATA_rows), @@ -430,2 +430,2 @@ def hub_datasets( - "splits_response": get_splits_response(hub_public_jsonl, None, None), - "first_rows_response": get_first_rows_response(hub_public_jsonl, JSONL_cols, JSONL_rows), + "splits_response": create_splits_response(hub_public_jsonl, None, None), + "first_rows_response": create_first_rows_response(hub_public_jsonl, JSONL_cols, JSONL_rows), @@ -435,2 +435,2 @@ def hub_datasets( - "splits_response": get_splits_response(hub_public_audio, 54.0, 1), - "first_rows_response": get_first_rows_response( + "splits_response": create_splits_response(hub_public_audio, 54.0, 1), + "first_rows_response": create_first_rows_response( @@ -442,2 +442,2 @@ def hub_datasets( - 
"splits_response": get_splits_response(hub_public_image, 0, 1), - "first_rows_response": get_first_rows_response( + "splits_response": create_splits_response(hub_public_image, 0, 1), + "first_rows_response": create_first_rows_response( @@ -449,2 +449,2 @@ def hub_datasets( - "splits_response": get_splits_response(hub_public_images_list, 0, 1), - "first_rows_response": get_first_rows_response( + "splits_response": create_splits_response(hub_public_images_list, 0, 1), + "first_rows_response": create_first_rows_response( diff --git a/workers/first_rows/tests/test_response.py b/workers/first_rows/tests/test_response.py index 205deb0b..0e9d3519 100644 --- a/workers/first_rows/tests/test_response.py +++ b/workers/first_rows/tests/test_response.py @@ -9 +9 @@ from first_rows.config import WorkerConfig -from first_rows.response import get_first_rows_response +from first_rows.response import compute_first_rows_response @@ -49 +49 @@ def test_number_rows( - response = get_first_rows_response( + result = compute_first_rows_response( @@ -63 +63,2 @@ def test_number_rows( - assert response == expected_first_rows_response + assert result["first_rows_response"] == expected_first_rows_response + assert result["dataset_git_revision"] is not None @@ -66 +67 @@ def test_number_rows( - get_first_rows_response( + compute_first_rows_response( diff --git a/workers/first_rows/tests/test_worker.py b/workers/first_rows/tests/test_worker.py index dba55f4f..ef6ef938 100644 --- a/workers/first_rows/tests/test_worker.py +++ b/workers/first_rows/tests/test_worker.py @@ -33,5 +33,6 @@ def test_compute(worker: FirstRowsWorker, hub_public_csv: str) -> None: - response, cached_http_status, error_code = get_first_rows_response( - dataset_name=dataset, config_name=config, split_name=split - ) - assert cached_http_status == HTTPStatus.OK - assert error_code is None + cache_entry = get_first_rows_response(dataset_name=dataset, config_name=config, split_name=split) + assert cache_entry["http_status"] == HTTPStatus.OK + assert cache_entry["error_code"] is None + assert cache_entry["worker_version"] == worker.version + assert cache_entry["dataset_git_revision"] is not None + response = cache_entry["response"] diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 6ca15fb9..77cea4f6 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -359 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", 
"lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -365 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -786,0 +787 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" + @@ -822 +823 @@ name = "libcache" -version = "0.3.1" +version = "0.3.3" @@ -837 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl" @@ -865 +866 @@ name = "libqueue" -version = "0.4.2" +version = "0.4.6" @@ -874,0 +876 @@ mongoengine = ">=0.24.1,<0.25.0" +packaging = ">=21.3,<22.0" @@ -880 +882 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl" @@ -2292 +2294 @@ python-versions = "3.9.6" -content-hash = "cf5836d4de1661adb7edad80e268a276fafad82aaa7ba5e0c6bbe70d9c2f5a49" +content-hash = "5530681c1178a328bc5f4c896e35ec8a6276cdc75e2fbb39e8e1ea481641f228" @@ -2622 +2624 @@ libcache = [ - {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, + {file = "libcache-0.3.3-py3-none-any.whl", hash = "sha256:bf43a71767d263849f989ef6bd28fc9f143ef2c5502b4d1ec2de2fe3af6e5d09"}, @@ -2629 +2631 @@ libqueue = [ - {file = "libqueue-0.4.2-py3-none-any.whl", hash = 
"sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, + {file = "libqueue-0.4.6-py3-none-any.whl", hash = "sha256:f84c76d79f6e42138ae2abe6726c03f2684e79c757b532611babafe8fc4cd387"}, diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 585a96e9..79203d73 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -5 +5 @@ name = "splits" -version = "0.1.1" +version = "1.0.0" @@ -19 +19 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.3-py3-none-any.whl", develop = false } @@ -21 +21 @@ libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.6-py3-none-any.whl", develop = false } @@ -59 +59 @@ requires = ["poetry-core>=1.0.0"] -addopts = "-k 'not deprecated'" +# addopts = "-k 'wip'" @@ -62,2 +61,0 @@ markers = [ - "deprecated: tests on deprecated code (deselect with '-m \"not deprecated\"')", - "real_dataset: tests on real datasets (from the Hub)", diff --git a/workers/splits/src/splits/response.py b/workers/splits/src/splits/response.py index d4767ae5..7b89616b 100644 --- a/workers/splits/src/splits/response.py +++ b/workers/splits/src/splits/response.py @@ -34,0 +35,5 @@ class SplitsResponse(TypedDict): +class SplitsResponseResult(TypedDict): + splits_response: SplitsResponse + dataset_git_revision: Optional[str] + + @@ -57 +62 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, -def get_splits_response( +def get_dataset_git_revision( @@ -61 +66,33 @@ def get_splits_response( -) -> SplitsResponse: +) -> Union[str, None]: + """ + Get the git revision of the dataset. + Args: + dataset (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + hf_endpoint (`str`): + The Hub endpoint (for example: "https://huggingface.co") + hf_token (`str`, *optional*): + An authentication token (See https://huggingface.co/settings/token) + Returns: + `Union[str, None]`: the dataset git revision (sha) if any. + <Tip> + Raises the following errors: + - [`~worker.exceptions.DatasetNotFoundError`] + If the repository to download from cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. + </Tip> + """ + use_auth_token: Union[bool, str, None] = hf_token if hf_token is not None else False + try: + dataset_info = HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) + except RepositoryNotFoundError as err: + raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + return dataset_info.sha + + +def compute_splits_response( + dataset: str, + hf_endpoint: str, + hf_token: Optional[str] = None, +) -> SplitsResponseResult: @@ -74 +111,2 @@ def get_splits_response( - [`SplitsResponse`]: The list of splits names. + `SplitsResponseResult`: An object with the splits_response + (list of splits names) and the dataset_git_revision (sha) if any. 
@@ -86,5 +124,2 @@ def get_splits_response( - # first try to get the dataset config info - try: - HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) - except RepositoryNotFoundError as err: - raise DatasetNotFoundError("The dataset does not exist on the Hub.") from err + # first try to get the dataset config info. It raises if the dataset does not exist or is private + dataset_git_revision = get_dataset_git_revision(dataset=dataset, hf_endpoint=hf_endpoint, hf_token=hf_token) @@ -127 +162,4 @@ def get_splits_response( - return {"splits": split_items} + return { + "splits_response": {"splits": split_items}, + "dataset_git_revision": dataset_git_revision, + } diff --git a/workers/splits/src/splits/worker.py b/workers/splits/src/splits/worker.py index b1c2f4f4..5b25412c 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/splits/src/splits/worker.py @@ -3,0 +4 @@ +import importlib.metadata @@ -10,0 +12 @@ from libcache.simple_cache import ( + get_splits_response, @@ -16 +18 @@ from splits.config import WorkerConfig -from splits.response import get_splits_response +from splits.response import compute_splits_response, get_dataset_git_revision @@ -29 +31 @@ class SplitsWorker(Worker): - super().__init__(queue_config=worker_config.queue) + super().__init__(queue_config=worker_config.queue, version=importlib.metadata.version(__package__)) @@ -36,0 +39,32 @@ class SplitsWorker(Worker): + def should_skip_job(self, dataset: str, config: Optional[str] = None, split: Optional[str] = None) -> bool: + """Return True if the job should be skipped, False otherwise. + + The job must be skipped if: + - a cache entry exists for the dataset + - and the result was successful + - and it has been created with the same major version of the worker + - and it has been created with the exact same git commit of the dataset repository + + Args: + dataset (:obj:`str`): The name of the dataset. + config (:obj:`str`, `optional`): The name of the configuration. + split (:obj:`str`, `optional`): The name of the split. + + Returns: + :obj:`bool`: True if the job should be skipped, False otherwise. 
+ """ + try: + cache_entry = get_splits_response(dataset) + dataset_git_revision = get_dataset_git_revision( + dataset=dataset, hf_endpoint=self.config.common.hf_endpoint, hf_token=self.config.common.hf_token + ) + return ( + cache_entry["http_status"] == HTTPStatus.OK + and cache_entry["worker_version"] is not None + and self.compare_major_version(cache_entry["worker_version"]) == 0 + and cache_entry["dataset_git_revision"] is not None + and cache_entry["dataset_git_revision"] == dataset_git_revision + ) + except Exception: + return False + @@ -44 +78 @@ class SplitsWorker(Worker): - response = get_splits_response( + splits_response_result = compute_splits_response( @@ -47 +81,8 @@ class SplitsWorker(Worker): - upsert_splits_response(dataset_name=dataset, response=dict(response), http_status=HTTPStatus.OK) + response = splits_response_result["splits_response"] + upsert_splits_response( + dataset_name=dataset, + response=dict(response), + http_status=HTTPStatus.OK, + worker_version=self.version, + dataset_git_revision=splits_response_result["dataset_git_revision"], + ) diff --git a/workers/splits/tests/fixtures/hub.py b/workers/splits/tests/fixtures/hub.py index e493a1ed..fb98f93c 100644 --- a/workers/splits/tests/fixtures/hub.py +++ b/workers/splits/tests/fixtures/hub.py @@ -230 +230 @@ HubDatasets = Dict[str, HubDatasetTest] -def get_splits_response(dataset: str, num_bytes: float = None, num_examples: int = None): +def create_splits_response(dataset: str, num_bytes: float = None, num_examples: int = None): @@ -245,25 +244,0 @@ def get_splits_response(dataset: str, num_bytes: float = None, num_examples: int -def get_first_rows_response(dataset: str, cols: Dict[str, Any], rows: List[Any]): - dataset, config, split = get_default_config_split(dataset) - return { - "dataset": dataset, - "config": config, - "split": split, - "features": [ - { - "feature_idx": feature_idx, - "name": name, - "type": type, - } - for feature_idx, (name, type) in enumerate(cols.items()) - ], - "rows": [ - { - "row_idx": row_idx, - "truncated_cells": [], - "row": row, - } - for row_idx, row in enumerate(rows) - ], - } - - @@ -328 +303 @@ def hub_datasets( - "splits_response": get_splits_response(hub_public_csv, None, None), + "splits_response": create_splits_response(hub_public_csv, None, None), @@ -332 +307 @@ def hub_datasets( - "splits_response": get_splits_response(hub_private_csv, None, None), + "splits_response": create_splits_response(hub_private_csv, None, None), @@ -336 +311 @@ def hub_datasets( - "splits_response": get_splits_response(hub_gated_csv, None, None), + "splits_response": create_splits_response(hub_gated_csv, None, None), @@ -340 +315 @@ def hub_datasets( - "splits_response": get_splits_response(hub_public_audio, 54.0, 1), + "splits_response": create_splits_response(hub_public_audio, 54.0, 1), diff --git a/workers/splits/tests/test_response.py b/workers/splits/tests/test_response.py index 8c131c4d..3ab5c8ac 100644 --- a/workers/splits/tests/test_response.py +++ b/workers/splits/tests/test_response.py @@ -8 +8 @@ from splits.config import WorkerConfig -from splits.response import get_splits_response +from splits.response import compute_splits_response @@ -26 +26 @@ from .fixtures.hub import HubDatasets -def test_get_splits_response_simple_csv( +def test_compute_splits_response_simple_csv( @@ -32 +32 @@ def test_get_splits_response_simple_csv( - splits_response = get_splits_response( + result = compute_splits_response( @@ -37 +37,2 @@ def test_get_splits_response_simple_csv( - assert 
splits_response == expected_splits_response + assert result["splits_response"] == expected_splits_response + assert result["dataset_git_revision"] is not None @@ -41 +42 @@ def test_get_splits_response_simple_csv( - get_splits_response( + compute_splits_response( diff --git a/workers/splits/tests/test_worker.py b/workers/splits/tests/test_worker.py index ab0ba460..7bab4d15 100644 --- a/workers/splits/tests/test_worker.py +++ b/workers/splits/tests/test_worker.py @@ -26,0 +27,14 @@ def worker(worker_config: WorkerConfig) -> SplitsWorker: +def test_version(worker: SplitsWorker) -> None: + assert len(worker.version.split(".")) == 3 + assert worker.compare_major_version(other_version="0.0.0") > 0 + assert worker.compare_major_version(other_version="1000.0.0") < 0 + + +def test_should_skip_job(worker: SplitsWorker, hub_public_csv: str) -> None: + dataset = hub_public_csv + assert worker.should_skip_job(dataset=dataset) is False + # we add an entry to the cache + worker.compute(dataset=dataset) + assert worker.should_skip_job(dataset=dataset) is True + + @@ -30,3 +44,6 @@ def test_compute(worker: SplitsWorker, hub_public_csv: str) -> None: - response, cached_http_status, error_code = get_splits_response(dataset_name=hub_public_csv) - assert cached_http_status == HTTPStatus.OK - assert error_code is None + cache_entry = get_splits_response(dataset_name=hub_public_csv) + assert cache_entry["http_status"] == HTTPStatus.OK + assert cache_entry["error_code"] is None + assert cache_entry["worker_version"] == worker.version + assert cache_entry["dataset_git_revision"] is not None + response = cache_entry["response"]
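The skip logic introduced above boils down to a freshness check over two fields stored alongside each cached response: the worker version (compared on its major component) and the git revision of the dataset repository at compute time. A minimal self-contained sketch of that check, assuming an illustrative `CacheEntry` shape and `major` helper (neither is the actual libcache API):

```python
from http import HTTPStatus
from typing import Optional, TypedDict


class CacheEntry(TypedDict):
    # illustrative shape, not the actual libcache schema
    http_status: HTTPStatus
    worker_version: Optional[str]  # e.g. "1.2.0", set when the response was computed
    dataset_git_revision: Optional[str]  # sha of the dataset repo at compute time


def major(version: str) -> int:
    return int(version.split(".")[0])


def should_skip_job(cache_entry: CacheEntry, worker_version: str, current_revision: str) -> bool:
    # recompute unless the cached response is successful AND was produced by the
    # same major worker version AND for the exact same dataset commit
    return (
        cache_entry["http_status"] == HTTPStatus.OK
        and cache_entry["worker_version"] is not None
        and major(cache_entry["worker_version"]) == major(worker_version)
        and cache_entry["dataset_git_revision"] is not None
        and cache_entry["dataset_git_revision"] == current_revision
    )
```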
fd45ff0364c7314a3ab771b9f9cb70991b374636
Sylvain Lesage
2022-10-25T19:11:47
feat: 🎸 sort the configs alphabetically (#623)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 7ba2fe64..84257b33 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -9 +9 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-2365545", + "splits": "huggingface/datasets-server-workers-splits:sha-a5134c4", diff --git a/workers/splits/src/splits/response.py b/workers/splits/src/splits/response.py index f906a454..d4767ae5 100644 --- a/workers/splits/src/splits/response.py +++ b/workers/splits/src/splits/response.py @@ -35,0 +36,13 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, + """Get the list of splits full names (split and config) for a dataset. + + Args: + dataset (str): A dataset name. If the repository is namespaced (a user or an organization), the namespace and + the dataset name are separated with a slash (`/`), for example: `user/dataset`. + use_auth_token (Union[bool, str, None], optional): user token. It allows retrieving the splits of gated + datasets. Defaults to False (no authentication). + + Returns: + List[SplitFullName]: a list of splits full names: objects with the keys `dataset`, `config` and `split`. They + are sorted alphabetically by configuration (config), but the order of the splits within a given configuration + is preserved. + """ @@ -39 +52 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, - for config in get_dataset_config_names(path=dataset, use_auth_token=use_auth_token) + for config in sorted(get_dataset_config_names(path=dataset, use_auth_token=use_auth_token))
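The one-line change above is easy to miss: wrapping `get_dataset_config_names` in `sorted()` orders the configs alphabetically while the inner iteration still yields the splits of each config in their original order. A small standalone illustration of that property, with a plain dict standing in for the calls to the `datasets` library:

```python
from typing import Dict, List, Tuple


def split_full_names(splits_per_config: Dict[str, List[str]]) -> List[Tuple[str, str]]:
    # configs are sorted alphabetically; splits keep their original order
    return [
        (config, split)
        for config in sorted(splits_per_config)
        for split in splits_per_config[config]
    ]


assert split_full_names({"b": ["train", "test"], "a": ["train"]}) == [
    ("a", "train"),
    ("b", "train"),
    ("b", "test"),
]
```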
f287ba4d44bbcb1f8f22caaf34375ff10ca603a6
Sylvain Lesage
2022-10-25T15:52:12
fix: 🐛 fix hf-token (#622)
diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl index f25f232b..5b58d33a 100644 --- a/chart/templates/services/admin/_container.tpl +++ b/chart/templates/services/admin/_container.tpl @@ -40 +40,5 @@ - value: {{ .Values.secrets.hfToken | quote }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.hfToken | quote }} + key: HF_TOKEN + optional: false diff --git a/chart/templates/services/api/_container.tpl b/chart/templates/services/api/_container.tpl index 1916b5ba..56eb38bf 100644 --- a/chart/templates/services/api/_container.tpl +++ b/chart/templates/services/api/_container.tpl @@ -40 +40,5 @@ - value: {{ .Values.secrets.hfToken | quote }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.hfToken | quote }} + key: HF_TOKEN + optional: false diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index 6bf3df0b..a6497910 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -50 +50,5 @@ - value: {{ .Values.secrets.hfToken | quote }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.hfToken | quote }} + key: HF_TOKEN + optional: false diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 23c68039..6e28763a 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -50 +50,5 @@ - value: {{ .Values.secrets.hfToken | quote }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.hfToken | quote }} + key: HF_TOKEN + optional: false
37e3a8d62324ccbc02bce4c08fdf6109e98c969f
Sylvain Lesage
2022-10-25T14:24:15
test: 💍 missing change in e2e (#621)
diff --git a/e2e/tests/test_21_api_metrics.py b/e2e/tests/test_21_api_metrics.py index 75a6c27b..ae81c77b 100644 --- a/e2e/tests/test_21_api_metrics.py +++ b/e2e/tests/test_21_api_metrics.py @@ -8 +8 @@ from typing import Dict -from .utils import ADMIN_URL, get +from .utils import API_URL, get @@ -19 +19 @@ def test_metrics(): - response = get("/metrics", url=ADMIN_URL) + response = get("/metrics", url=API_URL) @@ -23 +23 @@ def test_metrics(): - metrics = {line.split(" ")[0] for line in lines if line and line[0] != "#"} + metrics = {line.split(" ")[0]: float(line.split(" ")[1]) for line in lines if line and line[0] != "#"} @@ -26,0 +27,6 @@ def test_metrics(): + # the middleware should have recorded the request + name = 'starlette_requests_total{method="GET",path_template="/metrics"}' + assert name in metrics, metrics + assert metrics[name] > 0, metrics + + metrics = set(metrics.keys()) @@ -28,2 +34,2 @@ def test_metrics(): - # eg. 'queue_jobs_total{pid="10",queue="/first-rows",status="started"}' - assert has_metric( + # these metrics are only available in the admin API + assert not has_metric( @@ -31,4 +37,2 @@ def test_metrics(): - ), f"queue_jobs_total - endpoint={endpoint} not found in {metrics}" - # cache should have been filled by the previous tests - # eg. 'responses_in_cache_total{error_code="None",http_status="200",path="/splits",pid="10"}' - assert has_metric( + ), f"queue_jobs_total - endpoint={endpoint} found in {metrics}" + assert not has_metric( @@ -38 +42 @@ def test_metrics(): - ), f"responses_in_cache_total - endpoint {endpoint} not found in {metrics}" + ), f"responses_in_cache_total - endpoint {endpoint} found in {metrics}" diff --git a/e2e/tests/test_31_admin_metrics.py b/e2e/tests/test_31_admin_metrics.py index 75a6c27b..504aa35b 100644 --- a/e2e/tests/test_31_admin_metrics.py +++ b/e2e/tests/test_31_admin_metrics.py @@ -23 +23 @@ def test_metrics(): - metrics = {line.split(" ")[0] for line in lines if line and line[0] != "#"} + metrics = {line.split(" ")[0]: float(line.split(" ")[1]) for line in lines if line and line[0] != "#"} @@ -26,0 +27,6 @@ def test_metrics(): + # the middleware should have recorded the request + name = 'starlette_requests_total{method="GET",path_template="/metrics"}' + assert name in metrics, metrics + assert metrics[name] > 0, metrics + + metrics = set(metrics.keys())
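The switch from a set of metric names to a name-to-value dict is what lets the tests assert on counter values, not just on presence. Roughly, the parsing and the regex-based `has_metric` matching work like this on a Prometheus exposition payload (the sample payload below is invented; real payloads may also carry timestamps, which this simple split would not handle):

```python
import re
from typing import Dict

SAMPLE = """# HELP starlette_requests_total Total HTTP requests
# TYPE starlette_requests_total counter
starlette_requests_total{method="GET",path_template="/metrics"} 3.0
process_cpu_seconds_total 1.5"""


def parse_metrics(content: str) -> Dict[str, float]:
    # 'name{labels}' -> sample value; comment lines (# HELP / # TYPE) are dropped
    lines = content.split("\n")
    return {line.split(" ")[0]: float(line.split(" ")[1]) for line in lines if line and line[0] != "#"}


def has_metric(name: str, labels: Dict[str, str], metrics: set) -> bool:
    # label values may be regexes, e.g. {"pid": "[0-9]*"}
    label_str = ",".join([f'{k}="{v}"' for k, v in labels.items()])
    return any(re.match(name + "{" + label_str + "}", metric) for metric in metrics)


metrics = parse_metrics(SAMPLE)
assert metrics['starlette_requests_total{method="GET",path_template="/metrics"}'] == 3.0
assert has_metric("starlette_requests_total", {"method": "GET", "path_template": "/metrics"}, set(metrics))
```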
554bcee846addbb263349a43bb0627197a77136a
Sylvain Lesage
2022-10-25T09:38:42
Fix api metrics (#620)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index b1b18d4e..e9a96944 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -55,5 +54,0 @@ jobs: - QUEUE_SLEEP_TIME: "1" - # hard coded, see e2e/tests/fixtures/hub.py - COMMON_HF_ENDPOINT: "https://hub-ci.huggingface.co" - COMMON_HF_TOKEN: "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" - FIRST_ROWS_MAX_NUMBER: "4" @@ -64,0 +60,5 @@ jobs: + QUEUE_SLEEP_TIME: "1" + # hard coded, see e2e/tests/fixtures/hub.py + COMMON_HF_ENDPOINT: "https://hub-ci.huggingface.co" + COMMON_HF_TOKEN: "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" + FIRST_ROWS_MAX_NUMBER: "4" @@ -66,0 +67,2 @@ jobs: + API_UVICORN_NUM_WORKERS: "2" + API_UVICORN_PORT: "8080" @@ -83,0 +86,2 @@ jobs: + API_UVICORN_NUM_WORKERS: "2" + API_UVICORN_PORT: "8080" diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 135c633e..7ba2fe64 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -6 +6 @@ - "api": "huggingface/datasets-server-services-api:sha-3513a2b" + "api": "huggingface/datasets-server-services-api:sha-9cc0bbe" diff --git a/chart/templates/services/api/_container.tpl b/chart/templates/services/api/_container.tpl index cd59e76a..1916b5ba 100644 --- a/chart/templates/services/api/_container.tpl +++ b/chart/templates/services/api/_container.tpl @@ -49,2 +48,0 @@ - - name: API_PROMETHEUS_MULTIPROC_DIR - value: {{ .Values.api.prometheusMultiprocDirectory | quote }} @@ -56,0 +55,2 @@ + - name: PROMETHEUS_MULTIPROC_DIR + value: {{ .Values.api.prometheusMultiprocDirectory | quote }} diff --git a/e2e/Makefile b/e2e/Makefile index def92e2f..0e9bbf43 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -8 +7,0 @@ export COMMON_HF_TOKEN := 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 -export ADMIN_UVICORN_PORT := 9081 @@ -9,0 +9 @@ export API_UVICORN_PORT := 9080 +export ADMIN_UVICORN_PORT := 9081 @@ -12,0 +13 @@ export ADMIN_UVICORN_NUM_WORKERS := 2 +export API_UVICORN_NUM_WORKERS := 2 diff --git a/e2e/tests/test_20_api_healthcheck.py b/e2e/tests/test_20_api_healthcheck.py new file mode 100644 index 00000000..7aa175de --- /dev/null +++ b/e2e/tests/test_20_api_healthcheck.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from .utils import API_URL, poll + + +def test_healthcheck(): + # this test ensures the /healthcheck and the /metrics endpoints are hidden + response = poll("/healthcheck", expected_code=200, url=API_URL) + assert response.status_code == 200, f"{response.status_code} - {response.text}" + assert "ok" in response.text, response.text diff --git a/e2e/tests/test_21_api_metrics.py b/e2e/tests/test_21_api_metrics.py new file mode 100644 index 00000000..75a6c27b --- /dev/null +++ b/e2e/tests/test_21_api_metrics.py @@ -0,0 +1,38 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors.
+ +import os +import re +from typing import Dict + +from .utils import ADMIN_URL, get + + +def has_metric(name: str, labels: Dict[str, str], metrics: set[str]) -> bool: + label_str = ",".join([f'{k}="{v}"' for k, v in labels.items()]) + s = name + "{" + label_str + "}" + return any(re.match(s, metric) is not None for metric in metrics) + + +def test_metrics(): + assert "PROMETHEUS_MULTIPROC_DIR" in os.environ + response = get("/metrics", url=ADMIN_URL) + assert response.status_code == 200, f"{response.status_code} - {response.text}" + content = response.text + lines = content.split("\n") + metrics = {line.split(" ")[0] for line in lines if line and line[0] != "#"} + # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn + assert "process_start_time_seconds" not in metrics + + for endpoint in ["/splits", "/first-rows"]: + # eg. 'queue_jobs_total{pid="10",queue="/first-rows",status="started"}' + assert has_metric( + name="queue_jobs_total", labels={"pid": "[0-9]*", "queue": endpoint, "status": "started"}, metrics=metrics + ), f"queue_jobs_total - endpoint={endpoint} not found in {metrics}" + # cache should have been filled by the previous tests + # eg. 'responses_in_cache_total{error_code="None",http_status="200",path="/splits",pid="10"}' + assert has_metric( + name="responses_in_cache_total", + labels={"error_code": "None", "http_status": "200", "path": endpoint, "pid": "[0-9]*"}, + metrics=metrics, + ), f"responses_in_cache_total - endpoint {endpoint} not found in {metrics}" diff --git a/e2e/tests/utils.py b/e2e/tests/utils.py index afd56b86..52b63ffd 100644 --- a/e2e/tests/utils.py +++ b/e2e/tests/utils.py @@ -14 +14,2 @@ PORT_REVERSE_PROXY = os.environ.get("PORT_REVERSE_PROXY", "8000") -ADMIN_UVICORN_PORT = os.environ.get("ADMIN_UVICORN_PORT", "8080") +API_UVICORN_PORT = os.environ.get("API_UVICORN_PORT", "8080") +ADMIN_UVICORN_PORT = os.environ.get("ADMIN_UVICORN_PORT", "8081") @@ -18,0 +20 @@ ADMIN_URL = f"http://localhost:{ADMIN_UVICORN_PORT}" +API_URL = f"http://localhost:{API_UVICORN_PORT}" diff --git a/services/admin/Makefile b/services/admin/Makefile index 7181f453..a089c975 100644 --- a/services/admin/Makefile +++ b/services/admin/Makefile @@ -28,2 +28 @@ test: - poetry run python -m pytest -vv -x tests/test_prometheus.py - PROMETHEUS_MULTIPROC_DIR=/tmp poetry run python -m pytest -vv -x tests/test_prometheus.py + PROMETHEUS_MULTIPROC_DIR=/tmp poetry run python -m pytest -vv -x -k "test_metrics or test_prometheus" tests diff --git a/services/admin/tests/test_app.py b/services/admin/tests/test_app.py index ce051c89..2322c31c 100644 --- a/services/admin/tests/test_app.py +++ b/services/admin/tests/test_app.py @@ -4 +3,0 @@ -import os @@ -62,2 +60,0 @@ def test_metrics(client: TestClient) -> None: - is_multiprocess = "PROMETHEUS_MULTIPROC_DIR" in os.environ - @@ -69,17 +66,5 @@ def test_metrics(client: TestClient) -> None: - if not is_multiprocess: - name = "process_start_time_seconds" - assert name in metrics - assert metrics[name] > 0 - additional_field = ('pid="' + str(os.getpid()) + '",') if is_multiprocess else "" - for _, job_type in JobType.__members__.items(): - assert "queue_jobs_total{" + additional_field + 'queue="' + job_type.value + '",status="started"}' in metrics - # still empty - assert ( - "responses_in_cache_total{" + additional_field + 'path="/splits",http_status="200",error_code=null}' - not in metrics - ) - assert ( - "responses_in_cache_total{" + additional_field + 'path="/first-rows",http_status="200",error_code=null}' - not in metrics 
- ) - assert 'starlette_requests_total{method="GET",path_template="/metrics"}' in metrics + + # the middleware should have recorded the request + name = 'starlette_requests_total{method="GET",path_template="/metrics"}' + assert name in metrics, metrics + assert metrics[name] > 0, metrics diff --git a/services/admin/tests/test_prometheus.py b/services/admin/tests/test_prometheus.py index 0da1a83b..24f45481 100644 --- a/services/admin/tests/test_prometheus.py +++ b/services/admin/tests/test_prometheus.py @@ -8,0 +9 @@ def test_prometheus(app_config: AppConfig) -> None: + # we depend on app_config to be sure we already connected to the database @@ -19,2 +20,5 @@ def test_prometheus(app_config: AppConfig) -> None: - if not is_multiprocess: - name = "process_start_time_seconds" + + name = "process_start_time_seconds" + if is_multiprocess: + assert name not in metrics + else: @@ -22,0 +27 @@ def test_prometheus(app_config: AppConfig) -> None: + diff --git a/services/api/Makefile b/services/api/Makefile index a9c9ea6b..3bafe7af 100644 --- a/services/api/Makefile +++ b/services/api/Makefile @@ -11 +11 @@ include ../../tools/Python.mk -include ../../tools/PythonTest.mk +#include ../../tools/PythonTest.mk @@ -20,0 +21,17 @@ watch: + +# override the default test target to test prometheus depending on the environment +# we cannot set the env var with pytest.MonkeyPatch, it's too late +.PHONY: test +test: + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -vv -x tests + PROMETHEUS_MULTIPROC_DIR=/tmp poetry run python -m pytest -vv -x -k "test_metrics or test_prometheus" tests + $(MAKE) down + +.PHONY: coverage +coverage: + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests + $(MAKE) down diff --git a/services/api/README.md b/services/api/README.md index 26e9e119..978520a1 100644 --- a/services/api/README.md +++ b/services/api/README.md @@ -16 +15,0 @@ Set environment variables to configure the application (`API_` prefix): -- `API_PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker. @@ -25,0 +25,4 @@ The following environment variables are used to configure the Uvicorn server (`A +### Prometheus + +- `PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker. 
+ diff --git a/services/api/src/api/app.py b/services/api/src/api/app.py index 6fde2b09..62590437 100644 --- a/services/api/src/api/app.py +++ b/services/api/src/api/app.py @@ -26 +26 @@ def create_app() -> Starlette: - prometheus = Prometheus(prometheus_multiproc_dir=app_config.api.prometheus_multiproc_dir) + prometheus = Prometheus() diff --git a/services/api/src/api/config.py b/services/api/src/api/config.py index d35b6b5b..24a5519d 100644 --- a/services/api/src/api/config.py +++ b/services/api/src/api/config.py @@ -30 +29,0 @@ class ApiConfig: - prometheus_multiproc_dir: Optional[str] @@ -38,2 +36,0 @@ class ApiConfig: - prometheus_multiproc_dir = env.str(name="PROMETHEUS_MULTIPROC_DIR", default="") - self.prometheus_multiproc_dir = None if prometheus_multiproc_dir == "" else prometheus_multiproc_dir diff --git a/services/api/src/api/prometheus.py b/services/api/src/api/prometheus.py index 44b9d16a..df7366e7 100644 --- a/services/api/src/api/prometheus.py +++ b/services/api/src/api/prometheus.py @@ -4 +4 @@ -from typing import Optional +import os @@ -20,5 +19,0 @@ class Prometheus: - prometheus_multiproc_dir: Optional[str] - - def __init__(self, prometheus_multiproc_dir: Optional[str]): - self.prometheus_multiproc_dir = prometheus_multiproc_dir - @@ -27 +22,2 @@ class Prometheus: - if self.prometheus_multiproc_dir is not None: + # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn + if "PROMETHEUS_MULTIPROC_DIR" in os.environ: @@ -29 +25 @@ class Prometheus: - MultiProcessCollector(registry=registry, path=self.prometheus_multiproc_dir) + MultiProcessCollector(registry=registry) @@ -33,0 +30,3 @@ class Prometheus: + def getLatestContent(self) -> str: + return generate_latest(self.getRegistry()).decode("utf-8") + @@ -35 +34 @@ class Prometheus: - return Response(generate_latest(self.getRegistry()), headers={"Content-Type": CONTENT_TYPE_LATEST}) + return Response(self.getLatestContent(), headers={"Content-Type": CONTENT_TYPE_LATEST}) diff --git a/services/api/tests/test_app.py b/services/api/tests/test_app.py index 845e9e70..6701d56c 100644 --- a/services/api/tests/test_app.py +++ b/services/api/tests/test_app.py @@ -236,5 +236,5 @@ def test_metrics(client: TestClient) -> None: - name = "process_start_time_seconds" - assert name in metrics - assert metrics[name] > 0 - name = "process_start_time_seconds" - assert 'starlette_requests_total{method="GET",path_template="/metrics"}' in metrics + + # the middleware should have recorded the request + name = 'starlette_requests_total{method="GET",path_template="/metrics"}' + assert name in metrics, metrics + assert metrics[name] > 0, metrics diff --git a/services/api/tests/test_prometheus.py b/services/api/tests/test_prometheus.py new file mode 100644 index 00000000..ab549de3 --- /dev/null +++ b/services/api/tests/test_prometheus.py @@ -0,0 +1,22 @@ +import os + +from api.prometheus import Prometheus + + +def test_prometheus() -> None: + is_multiprocess = "PROMETHEUS_MULTIPROC_DIR" in os.environ + + prometheus = Prometheus() + registry = prometheus.getRegistry() + assert registry is not None + + content = prometheus.getLatestContent() + print("content:", content) + lines = content.split("\n") + metrics = {line.split(" ")[0]: float(line.split(" ")[1]) for line in lines if line and line[0] != "#"} + name = "process_start_time_seconds" + if not is_multiprocess: + assert name in metrics, metrics + assert metrics[name] > 0, metrics[name] + else: + assert name not in metrics, metrics diff --git 
a/tools/docker-compose-datasets-server-from-local-code.yml b/tools/docker-compose-datasets-server-from-local-code.yml index 19a13d0a..d95ee70c 100644 --- a/tools/docker-compose-datasets-server-from-local-code.yml +++ b/tools/docker-compose-datasets-server-from-local-code.yml @@ -75 +75 @@ services: - API_PROMETHEUS_MULTIPROC_DIR: ${API_PROMETHEUS_MULTIPROC_DIR-} + PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} diff --git a/tools/docker-compose-datasets-server-from-remote-images.yml b/tools/docker-compose-datasets-server-from-remote-images.yml index d87092b7..14340ac7 100644 --- a/tools/docker-compose-datasets-server-from-remote-images.yml +++ b/tools/docker-compose-datasets-server-from-remote-images.yml @@ -71 +71 @@ services: - API_PROMETHEUS_MULTIPROC_DIR: ${API_PROMETHEUS_MULTIPROC_DIR-} + PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-}
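The net effect of this commit is that the service now follows the upstream convention of the Python Prometheus client: multiprocess aggregation is enabled solely by the standard `PROMETHEUS_MULTIPROC_DIR` environment variable instead of a service-specific setting. A sketch of the pattern the `Prometheus` class above converges on (standalone, not a drop-in replacement):

```python
import os

from prometheus_client import REGISTRY, CollectorRegistry, generate_latest
from prometheus_client.multiprocess import MultiProcessCollector


def latest_content() -> str:
    # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn
    if "PROMETHEUS_MULTIPROC_DIR" in os.environ:
        # each uvicorn worker writes samples to files in that directory;
        # the collector merges them at scrape time
        registry = CollectorRegistry()
        MultiProcessCollector(registry=registry)
        return generate_latest(registry).decode("utf-8")
    # single-process fallback: the default global registry
    return generate_latest(REGISTRY).decode("utf-8")
```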
0794216ce17a64afdf35a6623ac4e8f45cea7ba9
Sylvain Lesage
2022-10-25T08:39:03
fix: 🐛 mount the assets directory (#619)
diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 7e3d5187..23c68039 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -61,0 +62,5 @@ + - mountPath: {{ .Values.cache.assetsDirectory | quote }} + mountPropagation: None + name: nfs + subPath: "{{ include "assets.subpath" . }}" + readOnly: false
772fa84f35907fd18a70db298cb4095883081239
Sylvain Lesage
2022-10-24T19:05:36
Fix metrics (#618)
diff --git a/.github/workflows/_build_push_docker_hub.yml b/.github/workflows/_build_push_docker_hub.yml index 4ead5ecb..0a68ba78 100644 --- a/.github/workflows/_build_push_docker_hub.yml +++ b/.github/workflows/_build_push_docker_hub.yml @@ -26 +26 @@ jobs: - uses: actions/checkout@v2 + uses: actions/checkout@v3 @@ -31 +31 @@ jobs: - run: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" + run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT @@ -33 +33 @@ jobs: - uses: docker/login-action@v1 + uses: docker/login-action@v2 diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index 9ce62b88..b1b18d4e 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -29,6 +29,2 @@ jobs: - # from https://stackoverflow.com/a/61919791/7351594 - content=`cat ${{ inputs.config-file }}` - # the following lines are only required for multi line json - content="${content//'%'/'%25'}" - content="${content//$'\n'/'%0A'}" - content="${content//$'\r'/'%0D'}" + # from https://trstringer.com/github-actions-multiline-strings/ + dockerConfig=`cat ${{ inputs.config-file }}` @@ -36 +32,3 @@ jobs: - echo "::set-output name=dockerConfig::$content" + echo "dockerConfig<<EOF" >> $GITHUB_OUTPUT + echo "$dockerConfig" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT @@ -49 +47 @@ jobs: - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 @@ -66,0 +65,4 @@ jobs: + PORT_REVERSE_PROXY: "8000" + PROMETHEUS_MULTIPROC_DIR: "/tmp" + ADMIN_UVICORN_NUM_WORKERS: "2" + ADMIN_UVICORN_PORT: "8081" @@ -73,0 +76,10 @@ jobs: + env: + QUEUE_SLEEP_TIME: "1" + # hard coded, see e2e/tests/fixtures/hub.py + COMMON_HF_ENDPOINT: "https://hub-ci.huggingface.co" + COMMON_HF_TOKEN: "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" + FIRST_ROWS_MAX_NUMBER: "4" + PORT_REVERSE_PROXY: "8000" + PROMETHEUS_MULTIPROC_DIR: "/tmp" + ADMIN_UVICORN_NUM_WORKERS: "2" + ADMIN_UVICORN_PORT: "8081" diff --git a/.github/workflows/_quality-python.yml b/.github/workflows/_quality-python.yml index 5ddaa915..4fdfe252 100644 --- a/.github/workflows/_quality-python.yml +++ b/.github/workflows/_quality-python.yml @@ -34 +34 @@ jobs: - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 diff --git a/.github/workflows/_unit-tests-python.yml b/.github/workflows/_unit-tests-python.yml index ac854547..b9e7ebcc 100644 --- a/.github/workflows/_unit-tests-python.yml +++ b/.github/workflows/_unit-tests-python.yml @@ -35 +35 @@ jobs: - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 @@ -56 +56 @@ jobs: - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 diff --git a/.github/workflows/openapi-spec.yml b/.github/workflows/openapi-spec.yml index 91dd0a9b..987e870d 100644 --- a/.github/workflows/openapi-spec.yml +++ b/.github/workflows/openapi-spec.yml @@ -29 +29 @@ jobs: - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 9db2736d..ede171dc 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -18 +18 @@ jobs: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 @@ -21 +21 @@ jobs: - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 3ed82223..135c633e 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5,2 +5,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-5dc1a23", - "api": 
"huggingface/datasets-server-services-api:sha-5dc1a23", + "admin": "huggingface/datasets-server-services-admin:sha-3513a2b", + "api": "huggingface/datasets-server-services-api:sha-3513a2b" @@ -9,2 +9,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-c0290a8", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-c0290a8" + "splits": "huggingface/datasets-server-workers-splits:sha-2365545", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-2365545" diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index a4b4fafb..a7839acd 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -154 +154 @@ splits: - replicas: 10 + replicas: 12 @@ -171 +171 @@ firstRows: - replicas: 32 + replicas: 30 diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl index 90e73fe2..f25f232b 100644 --- a/chart/templates/services/admin/_container.tpl +++ b/chart/templates/services/admin/_container.tpl @@ -51,2 +50,0 @@ - - name: ADMIN_PROMETHEUS_MULTIPROC_DIR - value: {{ .Values.admin.prometheusMultiprocDirectory | quote }} @@ -58,0 +57,2 @@ + - name: PROMETHEUS_MULTIPROC_DIR + value: {{ .Values.admin.prometheusMultiprocDirectory | quote }} diff --git a/e2e/Makefile b/e2e/Makefile index ab5c215e..def92e2f 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -10,0 +11,2 @@ export FIRST_ROWS_MAX_NUMBER := 4 +export PROMETHEUS_MULTIPROC_DIR := /tmp +export ADMIN_UVICORN_NUM_WORKERS := 2 @@ -16,0 +19 @@ include ../tools/Python.mk +include ../tools/PythonTest.mk diff --git a/e2e/tests/test_30_auth.py b/e2e/tests/test_11_auth.py similarity index 100% rename from e2e/tests/test_30_auth.py rename to e2e/tests/test_11_auth.py diff --git a/e2e/tests/test_40_splits.py b/e2e/tests/test_12_splits.py similarity index 100% rename from e2e/tests/test_40_splits.py rename to e2e/tests/test_12_splits.py diff --git a/e2e/tests/test_50_first_rows.py b/e2e/tests/test_13_first_rows.py similarity index 100% rename from e2e/tests/test_50_first_rows.py rename to e2e/tests/test_13_first_rows.py diff --git a/e2e/tests/test_80_valid.py b/e2e/tests/test_14_valid.py similarity index 100% rename from e2e/tests/test_80_valid.py rename to e2e/tests/test_14_valid.py diff --git a/e2e/tests/test_90_is_valid.py b/e2e/tests/test_15_is_valid.py similarity index 100% rename from e2e/tests/test_90_is_valid.py rename to e2e/tests/test_15_is_valid.py diff --git a/e2e/tests/test_30_admin_healthcheck.py b/e2e/tests/test_30_admin_healthcheck.py new file mode 100644 index 00000000..162e5bb3 --- /dev/null +++ b/e2e/tests/test_30_admin_healthcheck.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from .utils import ADMIN_URL, poll + + +def test_healthcheck(): + # this tests ensures the /healthcheck and the /metrics endpoints are hidden + response = poll("/healthcheck", expected_code=200, url=ADMIN_URL) + assert response.status_code == 200, f"{response.status_code} - {response.text}" + assert "ok" in response.text, response.text diff --git a/e2e/tests/test_31_admin_metrics.py b/e2e/tests/test_31_admin_metrics.py new file mode 100644 index 00000000..75a6c27b --- /dev/null +++ b/e2e/tests/test_31_admin_metrics.py @@ -0,0 +1,38 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import os +import re +from typing import Dict + +from .utils import ADMIN_URL, get + + +def has_metric(name: str, labels: Dict[str, str], metrics: set[str]) -> bool: + label_str = ",".join([f'{k}="{v}"' for k, v in labels.items()]) + s = name + "{" + label_str + "}" + return any(re.match(s, metric) is not None for metric in metrics) + + +def test_metrics(): + assert "PROMETHEUS_MULTIPROC_DIR" in os.environ + response = get("/metrics", url=ADMIN_URL) + assert response.status_code == 200, f"{response.status_code} - {response.text}" + content = response.text + lines = content.split("\n") + metrics = {line.split(" ")[0] for line in lines if line and line[0] != "#"} + # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn + assert "process_start_time_seconds" not in metrics + + for endpoint in ["/splits", "/first-rows"]: + # eg. 'queue_jobs_total{pid="10",queue="/first-rows",status="started"}' + assert has_metric( + name="queue_jobs_total", labels={"pid": "[0-9]*", "queue": endpoint, "status": "started"}, metrics=metrics + ), f"queue_jobs_total - endpoint={endpoint} not found in {metrics}" + # cache should have been filled by the previous tests + # eg. 'responses_in_cache_total{error_code="None",http_status="200",path="/splits",pid="10"}' + assert has_metric( + name="responses_in_cache_total", + labels={"error_code": "None", "http_status": "200", "path": endpoint, "pid": "[0-9]*"}, + metrics=metrics, + ), f"responses_in_cache_total - endpoint {endpoint} not found in {metrics}" diff --git a/e2e/tests/utils.py b/e2e/tests/utils.py index c328ea12..afd56b86 100644 --- a/e2e/tests/utils.py +++ b/e2e/tests/utils.py @@ -13,0 +14 @@ PORT_REVERSE_PROXY = os.environ.get("PORT_REVERSE_PROXY", "8000") +ADMIN_UVICORN_PORT = os.environ.get("ADMIN_UVICORN_PORT", "8080") @@ -16,0 +18 @@ URL = f"http://localhost:{PORT_REVERSE_PROXY}" +ADMIN_URL = f"http://localhost:{ADMIN_UVICORN_PORT}" @@ -21 +23 @@ Headers = Dict[str, str] -def get(relative_url: str, headers: Headers = None) -> Response: +def get(relative_url: str, headers: Headers = None, url: str = URL) -> Response: @@ -24 +26 @@ def get(relative_url: str, headers: Headers = None) -> Response: - return requests.get(f"{URL}{relative_url}", headers=headers) + return requests.get(f"{url}{relative_url}", headers=headers) @@ -27 +29 @@ def get(relative_url: str, headers: Headers = None) -> Response: -def post(relative_url: str, json: Optional[Any] = None, headers: Headers = None) -> Response: +def post(relative_url: str, json: Optional[Any] = None, headers: Headers = None, url: str = URL) -> Response: @@ -30 +32 @@ def post(relative_url: str, json: Optional[Any] = None, headers: Headers = None) - return requests.post(f"{URL}{relative_url}", json=json, headers=headers) + return requests.post(f"{url}{relative_url}", json=json, headers=headers) @@ -34 +36,5 @@ def poll( - relative_url: str, error_field: Optional[str] = None, expected_code: Optional[int] = 200, headers: Headers = None + relative_url: str, + error_field: Optional[str] = None, + expected_code: Optional[int] = 200, + headers: Headers = None, + url: str = URL, @@ -46 +52 @@ def poll( - response = get(relative_url, headers) + response = get(relative_url=relative_url, headers=headers, url=url) diff --git a/libs/libcache/Makefile b/libs/libcache/Makefile index bc619378..02d36663 100644 --- a/libs/libcache/Makefile +++ b/libs/libcache/Makefile @@ -8,0 +9 @@ include ../../tools/Python.mk +include ../../tools/PythonTest.mk diff --git a/libs/libcommon/Makefile 
b/libs/libcommon/Makefile index 3f0a3e34..ba9bb2f2 100644 --- a/libs/libcommon/Makefile +++ b/libs/libcommon/Makefile @@ -6,0 +7 @@ include ../../tools/Python.mk +include ../../tools/PythonTest.mk diff --git a/libs/libqueue/Makefile b/libs/libqueue/Makefile index aa5d9582..e2ed38fa 100644 --- a/libs/libqueue/Makefile +++ b/libs/libqueue/Makefile @@ -8,0 +9 @@ include ../../tools/Python.mk +include ../../tools/PythonTest.mk diff --git a/services/admin/Makefile b/services/admin/Makefile index 0d8ec96d..7181f453 100644 --- a/services/admin/Makefile +++ b/services/admin/Makefile @@ -9,0 +10 @@ include ../../tools/Python.mk +#include ../../tools/PythonTest.mk @@ -18,0 +20,18 @@ watch: + +# override the default test target to test prometheus depending on the environment +# we cannot set the env var with pytest.MonkeyPatch, it's too late +.PHONY: test +test: + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -vv -x tests + poetry run python -m pytest -vv -x tests/test_prometheus.py + PROMETHEUS_MULTIPROC_DIR=/tmp poetry run python -m pytest -vv -x tests/test_prometheus.py + $(MAKE) down + +.PHONY: coverage +coverage: + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests + $(MAKE) down diff --git a/services/admin/README.md b/services/admin/README.md index 6a7c2937..a57ed16a 100644 --- a/services/admin/README.md +++ b/services/admin/README.md @@ -17 +16,0 @@ Set environment variables to configure the application (`ADMIN_` prefix): -- `ADMIN_PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker. @@ -26,0 +26,4 @@ The following environment variables are used to configure the Uvicorn server (`A +### Prometheus + +- `PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker. 
+ diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 184c20a5..9d40d57d 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -36,0 +37 @@ requires = ["poetry-core>=1.0.0"] +#addopts = "-k 'wip'" @@ -37,0 +39,3 @@ filterwarnings = ["ignore::DeprecationWarning"] +markers = [ + "wip: tests being developed" +] diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index d1decd65..058ed184 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -21 +21 @@ def create_app() -> Starlette: - prometheus = Prometheus(prometheus_multiproc_dir=app_config.admin.prometheus_multiproc_dir) + prometheus = Prometheus() diff --git a/services/admin/src/admin/config.py b/services/admin/src/admin/config.py index 82250268..17b74c7f 100644 --- a/services/admin/src/admin/config.py +++ b/services/admin/src/admin/config.py @@ -31 +30,0 @@ class AdminConfig: - prometheus_multiproc_dir: Optional[str] @@ -41,2 +39,0 @@ class AdminConfig: - prometheus_multiproc_dir = env.str(name="PROMETHEUS_MULTIPROC_DIR", default="") - self.prometheus_multiproc_dir = None if prometheus_multiproc_dir == "" else prometheus_multiproc_dir diff --git a/services/admin/src/admin/prometheus.py b/services/admin/src/admin/prometheus.py index 0dcac720..09855314 100644 --- a/services/admin/src/admin/prometheus.py +++ b/services/admin/src/admin/prometheus.py @@ -4 +4 @@ -from typing import Dict, Optional +import os @@ -25,0 +26,14 @@ from admin.utils import JobType +# the metrics are global to the process +QUEUE_JOBS_TOTAL = Gauge( + name="queue_jobs_total", + documentation="Number of jobs in the queue", + labelnames=["queue", "status"], + multiprocess_mode="liveall", +) +RESPONSES_IN_CACHE_TOTAL = Gauge( + name="responses_in_cache_total", + documentation="Number of cached responses in the cache", + labelnames=["path", "http_status", "error_code"], + multiprocess_mode="liveall", +) + @@ -29 +42,0 @@ class Prometheus: - metrics: Dict[str, Gauge] = {} @@ -31 +43,0 @@ class Prometheus: - prometheus_multiproc_dir: Optional[str] @@ -33,3 +45 @@ class Prometheus: - def __init__(self, prometheus_multiproc_dir: Optional[str]): - self.prometheus_multiproc_dir = prometheus_multiproc_dir - self.initMetrics() + def __init__(self): @@ -41 +51,2 @@ class Prometheus: - if self.prometheus_multiproc_dir is not None: + # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn + if "PROMETHEUS_MULTIPROC_DIR" in os.environ: @@ -43 +54 @@ class Prometheus: - MultiProcessCollector(registry=registry, path=self.prometheus_multiproc_dir) + MultiProcessCollector(registry=registry) @@ -48,14 +58,0 @@ class Prometheus: - # add metrics from the databases - def initMetrics(self): - self.metrics["queue_jobs_total"] = Gauge( - "queue_jobs_total", "Number of jobs in the queue", ["queue", "status"] - ) - self.metrics["cache_entries_total"] = Gauge( - "cache_entries_total", "Number of entries in the cache", ["cache", "status"] - ) - self.metrics["responses_in_cache_total"] = Gauge( - "responses_in_cache_total", - "Number of cached responses in the cache", - ["path", "http_status", "error_code"], - ) - @@ -65 +62 @@ class Prometheus: - self.metrics["queue_jobs_total"].labels(queue=JobType.SPLITS.value, status=status).set(total) + QUEUE_JOBS_TOTAL.labels(queue=JobType.SPLITS.value, status=status).set(total) @@ -67 +64 @@ class Prometheus: - self.metrics["queue_jobs_total"].labels(queue=JobType.FIRST_ROWS.value, status=status).set(total) + 
QUEUE_JOBS_TOTAL.labels(queue=JobType.FIRST_ROWS.value, status=status).set(total) @@ -71,3 +68,3 @@ class Prometheus: - self.metrics["responses_in_cache_total"].labels( - path="/splits", http_status=http_status, error_code=error_code - ).set(total) + RESPONSES_IN_CACHE_TOTAL.labels(path="/splits", http_status=http_status, error_code=error_code).set( + total + ) @@ -76 +73 @@ class Prometheus: - self.metrics["responses_in_cache_total"].labels( + RESPONSES_IN_CACHE_TOTAL.labels( @@ -80 +77 @@ class Prometheus: - def endpoint(self, request: Request) -> Response: + def getLatestContent(self) -> str: @@ -81,0 +79 @@ class Prometheus: + return generate_latest(self.getRegistry()).decode("utf-8") @@ -83 +81,2 @@ class Prometheus: - return Response(generate_latest(self.getRegistry()), headers={"Content-Type": CONTENT_TYPE_LATEST}) + def endpoint(self, request: Request) -> Response: + return Response(self.getLatestContent(), headers={"Content-Type": CONTENT_TYPE_LATEST}) diff --git a/services/admin/tests/test_app.py b/services/admin/tests/test_app.py index 7f967dd5..ce051c89 100644 --- a/services/admin/tests/test_app.py +++ b/services/admin/tests/test_app.py @@ -3,0 +4 @@ +import os @@ -60,0 +62,2 @@ def test_metrics(client: TestClient) -> None: + is_multiprocess = "PROMETHEUS_MULTIPROC_DIR" in os.environ + @@ -66,4 +69,5 @@ def test_metrics(client: TestClient) -> None: - name = "process_start_time_seconds" - assert name in metrics - assert metrics[name] > 0 - name = "process_start_time_seconds" + if not is_multiprocess: + name = "process_start_time_seconds" + assert name in metrics + assert metrics[name] > 0 + additional_field = ('pid="' + str(os.getpid()) + '",') if is_multiprocess else "" @@ -71 +75 @@ def test_metrics(client: TestClient) -> None: - assert 'queue_jobs_total{queue="' + job_type.value + '",status="started"}' in metrics + assert "queue_jobs_total{" + additional_field + 'queue="' + job_type.value + '",status="started"}' in metrics @@ -73,2 +77,8 @@ def test_metrics(client: TestClient) -> None: - assert 'responses_in_cache_total{path="/splits",http_status="200",error_code=null}' not in metrics - assert 'responses_in_cache_total{path="/first-rows",http_status="200",error_code=null}' not in metrics + assert ( + "responses_in_cache_total{" + additional_field + 'path="/splits",http_status="200",error_code=null}' + not in metrics + ) + assert ( + "responses_in_cache_total{" + additional_field + 'path="/first-rows",http_status="200",error_code=null}' + not in metrics + ) diff --git a/services/admin/tests/test_prometheus.py b/services/admin/tests/test_prometheus.py new file mode 100644 index 00000000..0da1a83b --- /dev/null +++ b/services/admin/tests/test_prometheus.py @@ -0,0 +1,34 @@ +import os + +from admin.config import AppConfig +from admin.prometheus import Prometheus +from admin.utils import JobType + + +def test_prometheus(app_config: AppConfig) -> None: + is_multiprocess = "PROMETHEUS_MULTIPROC_DIR" in os.environ + + prometheus = Prometheus() + registry = prometheus.getRegistry() + assert registry is not None + + content = prometheus.getLatestContent() + print("content:", content) + lines = content.split("\n") + metrics = {line.split(" ")[0]: float(line.split(" ")[1]) for line in lines if line and line[0] != "#"} + if not is_multiprocess: + name = "process_start_time_seconds" + assert name in metrics + assert metrics[name] > 0 + additional_field = ('pid="' + str(os.getpid()) + '",') if is_multiprocess else "" + for _, job_type in JobType.__members__.items(): + assert 
"queue_jobs_total{" + additional_field + 'queue="' + job_type.value + '",status="started"}' in metrics + # still empty + assert ( + "responses_in_cache_total{" + additional_field + 'path="/splits",http_status="200",error_code=null}' + not in metrics + ) + assert ( + "responses_in_cache_total{" + additional_field + 'path="/first-rows",http_status="200",error_code=null}' + not in metrics + ) diff --git a/services/api/Makefile b/services/api/Makefile index 97518e03..a9c9ea6b 100644 --- a/services/api/Makefile +++ b/services/api/Makefile @@ -10,0 +11 @@ include ../../tools/Python.mk +include ../../tools/PythonTest.mk diff --git a/tools/Python.mk b/tools/Python.mk index 32215a43..30a40fc9 100644 --- a/tools/Python.mk +++ b/tools/Python.mk @@ -32,14 +31,0 @@ style: - -.PHONY: test -test: - $(MAKE) down - $(MAKE) up - poetry run python -m pytest -vv -x tests - $(MAKE) down - -.PHONY: coverage -coverage: - $(MAKE) down - $(MAKE) up - poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests - $(MAKE) down diff --git a/tools/PythonTest.mk b/tools/PythonTest.mk new file mode 100644 index 00000000..85c8d92e --- /dev/null +++ b/tools/PythonTest.mk @@ -0,0 +1,13 @@ +.PHONY: test +test: + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -vv -x tests + $(MAKE) down + +.PHONY: coverage +coverage: + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests + $(MAKE) down diff --git a/tools/docker-compose-datasets-server-from-local-code.yml b/tools/docker-compose-datasets-server-from-local-code.yml index faf96e86..19a13d0a 100644 --- a/tools/docker-compose-datasets-server-from-local-code.yml +++ b/tools/docker-compose-datasets-server-from-local-code.yml @@ -42 +42 @@ services: - ADMIN_PROMETHEUS_MULTIPROC_DIR: ${ADMIN_PROMETHEUS_MULTIPROC_DIR-} + PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} diff --git a/tools/docker-compose-datasets-server-from-remote-images.yml b/tools/docker-compose-datasets-server-from-remote-images.yml index 15eb2648..d87092b7 100644 --- a/tools/docker-compose-datasets-server-from-remote-images.yml +++ b/tools/docker-compose-datasets-server-from-remote-images.yml @@ -40 +40 @@ services: - ADMIN_PROMETHEUS_MULTIPROC_DIR: ${ADMIN_PROMETHEUS_MULTIPROC_DIR-} + PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR-} diff --git a/workers/first_rows/Makefile b/workers/first_rows/Makefile index 742e44e2..f5f1a3c7 100644 --- a/workers/first_rows/Makefile +++ b/workers/first_rows/Makefile @@ -10,0 +11 @@ include ../../tools/Python.mk +include ../../tools/PythonTest.mk diff --git a/workers/splits/Makefile b/workers/splits/Makefile index 1abf7176..7273eef7 100644 --- a/workers/splits/Makefile +++ b/workers/splits/Makefile @@ -10,0 +11 @@ include ../../tools/Python.mk +include ../../tools/PythonTest.mk
90d30a4ffb836d9c48a19adaefa46eff4821939c
Sylvain Lesage
2022-10-21T15:40:12
Details (#616)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index caaf3842..3ed82223 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -9,2 +9,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-5dc1a23", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-9b9e8d0" + "splits": "huggingface/datasets-server-workers-splits:sha-c0290a8", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-c0290a8" diff --git a/libs/libqueue/src/libqueue/queue.py b/libs/libqueue/src/libqueue/queue.py index b23e200b..57d6ef75 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libqueue/src/libqueue/queue.py @@ -67,5 +67 @@ class DumpByPendingStatus(TypedDict): -class EmptyQueue(Exception): - pass - - -class JobNotFound(Exception): +class EmptyQueueError(Exception): @@ -209,2 +205,2 @@ class Queue: - EmptyQueue: if there is no job in the queue, within the limit of the maximum number of started jobs for a - dataset + EmptyQueueError: if there is no job in the queue, within the limit of the maximum number of started jobs + for a dataset @@ -244 +240,3 @@ class Queue: - raise EmptyQueue("no job available (within the limit of {max_jobs_per_dataset} started jobs per dataset)") + raise EmptyQueueError( + "no job available (within the limit of {max_jobs_per_dataset} started jobs per dataset)" + ) diff --git a/libs/libqueue/src/libqueue/worker.py b/libs/libqueue/src/libqueue/worker.py index 383e2723..fe583dcc 100644 --- a/libs/libqueue/src/libqueue/worker.py +++ b/libs/libqueue/src/libqueue/worker.py @@ -13 +13 @@ from libqueue.config import QueueConfig -from libqueue.queue import EmptyQueue, Queue +from libqueue.queue import EmptyQueueError, Queue @@ -76 +76 @@ class Worker(ABC): - except EmptyQueue: + except EmptyQueueError: diff --git a/libs/libqueue/tests/test_queue.py b/libs/libqueue/tests/test_queue.py index 3875ba65..c15f3a29 100644 --- a/libs/libqueue/tests/test_queue.py +++ b/libs/libqueue/tests/test_queue.py @@ -9 +9 @@ from libqueue.queue import ( - EmptyQueue, + EmptyQueueError, @@ -41 +41 @@ def test_add_job() -> None: - with pytest.raises(EmptyQueue): + with pytest.raises(EmptyQueueError): @@ -48 +48 @@ def test_add_job() -> None: - with pytest.raises(EmptyQueue): + with pytest.raises(EmptyQueueError): @@ -126 +126 @@ def test_priority_to_non_started_datasets() -> None: - with pytest.raises(EmptyQueue): + with pytest.raises(EmptyQueueError): @@ -147 +147 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - with pytest.raises(EmptyQueue): + with pytest.raises(EmptyQueueError): @@ -153 +153 @@ def test_max_jobs_per_dataset(max_jobs_per_dataset: Optional[int]) -> None: - with pytest.raises(EmptyQueue): + with pytest.raises(EmptyQueueError): diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index e19aabc2..6bba12c8 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -359 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", 
"sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -365 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -633 +633 @@ name = "grpcio" -version = "1.49.1" +version = "1.50.0" @@ -643 +643 @@ six = ">=1.5.2" -protobuf = ["grpcio-tools (>=1.49.1)"] +protobuf = ["grpcio-tools (>=1.50.0)"] @@ -787 +786,0 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" - @@ -1196 +1195 @@ name = "pandas" -version = "1.5.0" +version = "1.5.1" @@ -1220 +1219 @@ name = "pbr" -version = "5.10.0" +version = "5.11.0" @@ -1315 +1314 @@ name = "psutil" -version = "5.9.2" +version = "5.9.3" @@ -1705 +1704 @@ name = "ruamel.yaml.clib" -version = "0.2.6" +version = "0.2.7" @@ -1752 +1751 @@ name = "scipy" -version = "1.9.2" +version = "1.9.3" @@ -1817 +1816 @@ name = "stevedore" -version = "4.0.1" 
+version = "4.1.0" @@ -2246 +2245 @@ name = "xxhash" -version = "3.0.0" +version = "3.1.0" @@ -2293 +2292 @@ python-versions = "3.9.6" -content-hash = "2aa2e60943104aea6952a60cdb35c4b05e790723bf35597739125c972afaa4b6" +content-hash = "cf5836d4de1661adb7edad80e268a276fafad82aaa7ba5e0c6bbe70d9c2f5a49" @@ -3314,56 +3313 @@ wrapt = [ -xxhash = [ - {file = "xxhash-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:219cba13991fd73cf21a5efdafa5056f0ae0b8f79e5e0112967e3058daf73eea"}, - {file = "xxhash-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fcbb846af15eff100c412ae54f4974ff277c92eacd41f1ec7803a64fd07fa0c"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f475fa817ff7955fc118fc1ca29a6e691d329b7ff43f486af36c22dbdcff1db"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9200a90f02ff6fd5fb63dea107842da71d8626d99b768fd31be44f3002c60bbe"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1403e4f551c9ef7bcef09af55f1adb169f13e4de253db0887928e5129f87af1"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa7f6ca53170189a2268c83af0980e6c10aae69e6a5efa7ca989f89fff9f8c02"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b63fbeb6d9c93d50ae0dc2b8a8b7f52f2de19e40fe9edc86637bfa5743b8ba2"}, - {file = "xxhash-3.0.0-cp310-cp310-win32.whl", hash = "sha256:31f25efd10b6f1f6d5c34cd231986d8aae9a42e042daa90b783917f170807869"}, - {file = "xxhash-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:807e88ed56e0fb347cb57d5bf44851f9878360fed700f2f63e622ef4eede87a5"}, - {file = "xxhash-3.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6d612c55a75d84d25898f6c5ad6a589aa556d1cb9af770b6c574ee62995167f6"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f9309fcaf73f93df3101f03a61dc30644adff3e8d0044fff8c0c195dbbe63e2"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2273fe40720e86346a17f06ef95cd60ee0d66ffce7cf55e390ef7350112b16d"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc6f3a334587c83c5ba56c19b254a97542ce1fc05ccfd66fbf568e6117718d65"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36cf410da5bfcca51ac3c2c51a3317dcd7af91f70fa61eca57fba39554f06ae3"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21752a3e9a2391d91bd51f4aa2fe028ae14ba6a8d37db9ebe00ccac10be5ac4a"}, - {file = "xxhash-3.0.0-cp36-cp36m-win32.whl", hash = "sha256:322068a063ef156455a401ab720f0892f2d2dd1540c1a308e95a7cbf356df51c"}, - {file = "xxhash-3.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2984fa9a880587c0bfa46d32717b2d209863ee68727ea0fc17f05fce25efa692"}, - {file = "xxhash-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6493dd938b360235da81b1c79d8cd048c4f11977e1159b4e744c54f98d3a7bb4"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb9eca32f9b4acc7149db2c86f8108167b9929b7da1887d4287a90cfdb3ea53a"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4125e70e4e1d79992d81de837a0586aa0241665dbc5ce01b9c89330ed5cbb66"}, - {file = 
"xxhash-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:583bea142569485bdb0c5900e804058c16edba1850b74519688c22bc546e6175"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f3adf2891acc18abacd15113e9cbbefd30e5f4ecaae32c23e5486fc09c76ea5"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed65a2671d380ae05262ce1e4ccc2b63f3c30506d207bf6fae8cd72be0ad65d4"}, - {file = "xxhash-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:c604b3dcac9d37e3fceaa11884927024953260cc4224d9b89400d16e6cf34021"}, - {file = "xxhash-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1c6fc59e182506496544bc6d426bcf6077066ed1b40cfcd937f707cc06c7ef50"}, - {file = "xxhash-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5628375dbb76d33b93b44854a6c5433e2a78115e03ea2ae1bb74a34ab012a43f"}, - {file = "xxhash-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:687aa4373690f23a3f43cc23d81005304d284ff6c041bff1f967664ab6410f36"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fa2100fb68b163e99370561c9e29ed37b9153fe99443600bea28829150eb0e4"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:891d7651431a055f76fe2c8f86c593c3dede8ec5b10ca55e8ff5c9fdceb55f0b"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:197c32d7b62be02957ca31aa69febadf9c5a34ef953053ea16e2c72465bc450f"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91fa4df41bda3cbec4084d9696028780b47128c1f8450d1ad9c3e4b6bf8b1f99"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cd38b766fc40e9fe37b80112656d2e5a0cb2f9bc12e01b286353b5ecd2768e8"}, - {file = "xxhash-3.0.0-cp38-cp38-win32.whl", hash = "sha256:4258ef78f5a7d1f9c595846134c7d81a868c74942051453258eb383498662d4d"}, - {file = "xxhash-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b82b1cf4407ad908e04e864473cc3baa8e764c7bbebea959150764cc681a1611"}, - {file = "xxhash-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:da4d91e28418469b29eed8635c08af28b588e51cd04288bed1ba1cf60f2d91f6"}, - {file = "xxhash-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48aab36169b0c00e586cb4eb2814ab8bfed686933126019906f917ff9a78c99e"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b0d522570c9ccea6203b3d96ac7f0cfc1d29e613640475d513be432545c48cc"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6054434ddb060685e86e7457f52d188b0886834baaa532f9f78b4f2b53cfd9b"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf546ca5f5903ceeb46d9e6abf81f3a64edb95bb7dbe0f75283eec93a7eb2a0"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22704f23f23ccbe892cee3e7568c67f07ac25beaa2d1cff183274005d9d39149"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83198e223bcc4b2418b5282ac930e444738c2a33859dee4e570b25c8433d83a2"}, - {file = "xxhash-3.0.0-cp39-cp39-win32.whl", hash = "sha256:3bcd4cd9b22293ea1c08822518fbb6d933c2960d66662d468a1945a45cace194"}, - {file = "xxhash-3.0.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:f5dd4c37da3408d56ae942dc103f4ae3b43510daa4f5accd0a411fc6e914f10a"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:485f172abc03f78afd4f38dbdbb5665f59c5487126fa4c3181c6582cda4de03b"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:035248b3d7ab6deb7b247278494d293b9faccfa853078319d25e2926f566b2f8"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30ae90c0cfd10ffe852c6b0f263253782eea74a8189d5f2440f6595c1e8047e"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fd203d8a3c013e679722047ef4f061f690c6cff49380622444bca4c30f3bf23"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6d60059aaef12a01c0cc24f1d7aaaab7933ae9f4b7adfd9ebbd37dc7ceac1745"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:676c97bf7cc298b65eec0368c2cb5611d87a8e876930843311ca728f69292752"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2245c6e20e96e3f8fdfb61ad6bc5cde6ce8a1c2b93aa4a32a27bba7ab3aeaf12"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae926a52d020085a2d7f69d0e2155cbf819ae409f2e5dbb345dd40a6462de32"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a2efdcb811be3edc520b78364c11a1e54f5d8e5db895a9ff2bcdd4a7ffa36a5"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:885b3a851980056707ab99a2c19c35dfe2c2ba5f602066dbfcd8af45ea855760"}, - {file = "xxhash-3.0.0.tar.gz", hash = "sha256:30b2d97aaf11fb122023f6b44ebb97c6955e9e00d7461a96415ca030b5ceb9c7"}, -] +xxhash = [] diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index f27fc598..2541e20e 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -33,2 +33,2 @@ sklearn = "^0.0" -tensorflow = {version = "^2.9.1", platform = "linux || win32"} -tensorflow-macos = {version = "^2.9.1", platform = "darwin"} +tensorflow = {version = "^2.9.1", markers = "sys_platform != 'darwin' or platform_machine != 'arm64'"} +tensorflow-macos = {version = "^2.9.1", markers = "sys_platform == 'darwin' and platform_machine == 'arm64'"} diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 1b8834e4..6ca15fb9 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -359 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort 
(>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -365 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -633 +633 @@ name = "grpcio" -version = "1.49.1" +version = "1.50.0" @@ -643 +643 @@ six = ">=1.5.2" -protobuf = ["grpcio-tools (>=1.49.1)"] +protobuf = ["grpcio-tools (>=1.50.0)"] @@ -787 +786,0 @@ url = "https://github.com/kpu/kenlm/archive/master.zip" - @@ -1196 +1195 @@ name = "pandas" -version = "1.5.0" +version = "1.5.1" @@ -1220 +1219 @@ name = "pbr" -version = "5.10.0" +version = "5.11.0" @@ -1315 +1314 @@ name = "psutil" -version = "5.9.2" +version = "5.9.3" @@ -1705 +1704 @@ name = "ruamel.yaml.clib" -version = "0.2.6" +version = "0.2.7" @@ -1752 +1751 @@ name = "scipy" -version = "1.9.2" +version = "1.9.3" @@ -1817 +1816 @@ name = "stevedore" -version = "4.0.1" +version = "4.1.0" @@ -2246 +2245 @@ name = "xxhash" -version = "3.0.0" +version = "3.1.0" @@ -2293 +2292 @@ python-versions = "3.9.6" -content-hash = "2aa2e60943104aea6952a60cdb35c4b05e790723bf35597739125c972afaa4b6" +content-hash = "cf5836d4de1661adb7edad80e268a276fafad82aaa7ba5e0c6bbe70d9c2f5a49" @@ -3314,56 +3313 @@ wrapt = [ 
-xxhash = [ - {file = "xxhash-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:219cba13991fd73cf21a5efdafa5056f0ae0b8f79e5e0112967e3058daf73eea"}, - {file = "xxhash-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fcbb846af15eff100c412ae54f4974ff277c92eacd41f1ec7803a64fd07fa0c"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f475fa817ff7955fc118fc1ca29a6e691d329b7ff43f486af36c22dbdcff1db"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9200a90f02ff6fd5fb63dea107842da71d8626d99b768fd31be44f3002c60bbe"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1403e4f551c9ef7bcef09af55f1adb169f13e4de253db0887928e5129f87af1"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa7f6ca53170189a2268c83af0980e6c10aae69e6a5efa7ca989f89fff9f8c02"}, - {file = "xxhash-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b63fbeb6d9c93d50ae0dc2b8a8b7f52f2de19e40fe9edc86637bfa5743b8ba2"}, - {file = "xxhash-3.0.0-cp310-cp310-win32.whl", hash = "sha256:31f25efd10b6f1f6d5c34cd231986d8aae9a42e042daa90b783917f170807869"}, - {file = "xxhash-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:807e88ed56e0fb347cb57d5bf44851f9878360fed700f2f63e622ef4eede87a5"}, - {file = "xxhash-3.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6d612c55a75d84d25898f6c5ad6a589aa556d1cb9af770b6c574ee62995167f6"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f9309fcaf73f93df3101f03a61dc30644adff3e8d0044fff8c0c195dbbe63e2"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2273fe40720e86346a17f06ef95cd60ee0d66ffce7cf55e390ef7350112b16d"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc6f3a334587c83c5ba56c19b254a97542ce1fc05ccfd66fbf568e6117718d65"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36cf410da5bfcca51ac3c2c51a3317dcd7af91f70fa61eca57fba39554f06ae3"}, - {file = "xxhash-3.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21752a3e9a2391d91bd51f4aa2fe028ae14ba6a8d37db9ebe00ccac10be5ac4a"}, - {file = "xxhash-3.0.0-cp36-cp36m-win32.whl", hash = "sha256:322068a063ef156455a401ab720f0892f2d2dd1540c1a308e95a7cbf356df51c"}, - {file = "xxhash-3.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2984fa9a880587c0bfa46d32717b2d209863ee68727ea0fc17f05fce25efa692"}, - {file = "xxhash-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6493dd938b360235da81b1c79d8cd048c4f11977e1159b4e744c54f98d3a7bb4"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb9eca32f9b4acc7149db2c86f8108167b9929b7da1887d4287a90cfdb3ea53a"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4125e70e4e1d79992d81de837a0586aa0241665dbc5ce01b9c89330ed5cbb66"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:583bea142569485bdb0c5900e804058c16edba1850b74519688c22bc546e6175"}, - {file = "xxhash-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f3adf2891acc18abacd15113e9cbbefd30e5f4ecaae32c23e5486fc09c76ea5"}, - {file = 
"xxhash-3.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed65a2671d380ae05262ce1e4ccc2b63f3c30506d207bf6fae8cd72be0ad65d4"}, - {file = "xxhash-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:c604b3dcac9d37e3fceaa11884927024953260cc4224d9b89400d16e6cf34021"}, - {file = "xxhash-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1c6fc59e182506496544bc6d426bcf6077066ed1b40cfcd937f707cc06c7ef50"}, - {file = "xxhash-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5628375dbb76d33b93b44854a6c5433e2a78115e03ea2ae1bb74a34ab012a43f"}, - {file = "xxhash-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:687aa4373690f23a3f43cc23d81005304d284ff6c041bff1f967664ab6410f36"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fa2100fb68b163e99370561c9e29ed37b9153fe99443600bea28829150eb0e4"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:891d7651431a055f76fe2c8f86c593c3dede8ec5b10ca55e8ff5c9fdceb55f0b"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:197c32d7b62be02957ca31aa69febadf9c5a34ef953053ea16e2c72465bc450f"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91fa4df41bda3cbec4084d9696028780b47128c1f8450d1ad9c3e4b6bf8b1f99"}, - {file = "xxhash-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cd38b766fc40e9fe37b80112656d2e5a0cb2f9bc12e01b286353b5ecd2768e8"}, - {file = "xxhash-3.0.0-cp38-cp38-win32.whl", hash = "sha256:4258ef78f5a7d1f9c595846134c7d81a868c74942051453258eb383498662d4d"}, - {file = "xxhash-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b82b1cf4407ad908e04e864473cc3baa8e764c7bbebea959150764cc681a1611"}, - {file = "xxhash-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:da4d91e28418469b29eed8635c08af28b588e51cd04288bed1ba1cf60f2d91f6"}, - {file = "xxhash-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48aab36169b0c00e586cb4eb2814ab8bfed686933126019906f917ff9a78c99e"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b0d522570c9ccea6203b3d96ac7f0cfc1d29e613640475d513be432545c48cc"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6054434ddb060685e86e7457f52d188b0886834baaa532f9f78b4f2b53cfd9b"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf546ca5f5903ceeb46d9e6abf81f3a64edb95bb7dbe0f75283eec93a7eb2a0"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22704f23f23ccbe892cee3e7568c67f07ac25beaa2d1cff183274005d9d39149"}, - {file = "xxhash-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83198e223bcc4b2418b5282ac930e444738c2a33859dee4e570b25c8433d83a2"}, - {file = "xxhash-3.0.0-cp39-cp39-win32.whl", hash = "sha256:3bcd4cd9b22293ea1c08822518fbb6d933c2960d66662d468a1945a45cace194"}, - {file = "xxhash-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5dd4c37da3408d56ae942dc103f4ae3b43510daa4f5accd0a411fc6e914f10a"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:485f172abc03f78afd4f38dbdbb5665f59c5487126fa4c3181c6582cda4de03b"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:035248b3d7ab6deb7b247278494d293b9faccfa853078319d25e2926f566b2f8"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30ae90c0cfd10ffe852c6b0f263253782eea74a8189d5f2440f6595c1e8047e"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fd203d8a3c013e679722047ef4f061f690c6cff49380622444bca4c30f3bf23"}, - {file = "xxhash-3.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6d60059aaef12a01c0cc24f1d7aaaab7933ae9f4b7adfd9ebbd37dc7ceac1745"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:676c97bf7cc298b65eec0368c2cb5611d87a8e876930843311ca728f69292752"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2245c6e20e96e3f8fdfb61ad6bc5cde6ce8a1c2b93aa4a32a27bba7ab3aeaf12"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae926a52d020085a2d7f69d0e2155cbf819ae409f2e5dbb345dd40a6462de32"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a2efdcb811be3edc520b78364c11a1e54f5d8e5db895a9ff2bcdd4a7ffa36a5"}, - {file = "xxhash-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:885b3a851980056707ab99a2c19c35dfe2c2ba5f602066dbfcd8af45ea855760"}, - {file = "xxhash-3.0.0.tar.gz", hash = "sha256:30b2d97aaf11fb122023f6b44ebb97c6955e9e00d7461a96415ca030b5ceb9c7"}, -] +xxhash = [] diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index e12a89f8..585a96e9 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -33,2 +33,2 @@ sklearn = "^0.0" -tensorflow = {version = "^2.9.1", platform = "linux || win32"} -tensorflow-macos = {version = "^2.9.1", platform = "darwin"} +tensorflow = {version = "^2.9.1", markers = "sys_platform != 'darwin' or platform_machine != 'arm64'"} +tensorflow-macos = {version = "^2.9.1", markers = "sys_platform == 'darwin' and platform_machine == 'arm64'"}
5f6112716f0e65dc4cd13463c2daf3d7c58438a8
Sylvain Lesage
2022-10-21T13:55:25
refactor: 💡 setup everything in the configs (#615)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 4c52231d..caaf3842 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -5,2 +5,2 @@ - "admin": "huggingface/datasets-server-services-admin:sha-ccfbc0c", - "api": "huggingface/datasets-server-services-api:sha-ccfbc0c", + "admin": "huggingface/datasets-server-services-admin:sha-5dc1a23", + "api": "huggingface/datasets-server-services-api:sha-5dc1a23", @@ -9,2 +9,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-ccfbc0c", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-ccfbc0c" + "splits": "huggingface/datasets-server-workers-splits:sha-5dc1a23", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-9b9e8d0" diff --git a/libs/libcache/dist/libcache-0.3.1-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.1-py3-none-any.whl new file mode 100644 index 00000000..c9759c18 Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.1-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.3.1.tar.gz b/libs/libcache/dist/libcache-0.3.1.tar.gz new file mode 100644 index 00000000..9346caff Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.1.tar.gz differ diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml index 1b36b3b2..6e28c497 100644 --- a/libs/libcache/pyproject.toml +++ b/libs/libcache/pyproject.toml @@ -5 +5 @@ name = "libcache" -version = "0.3.0" +version = "0.3.1" diff --git a/libs/libcache/src/libcache/asset.py b/libs/libcache/src/libcache/asset.py index 73b0f1bc..970013e4 100644 --- a/libs/libcache/src/libcache/asset.py +++ b/libs/libcache/src/libcache/asset.py @@ -10,2 +9,0 @@ from appdirs import user_cache_dir # type:ignore -logger = logging.getLogger(__name__) - @@ -20,0 +19 @@ def init_assets_dir(assets_directory: Optional[str] = None) -> str: + logging.info(f"Assets directory: {assets_directory}") @@ -22,4 +20,0 @@ def init_assets_dir(assets_directory: Optional[str] = None) -> str: - - -def show_assets_dir(assets_directory: Optional[str] = None) -> None: - logger.info(f"Assets directory: {init_assets_dir(assets_directory)}") diff --git a/libs/libcache/src/libcache/config.py b/libs/libcache/src/libcache/config.py index 4163e98c..26c2612f 100644 --- a/libs/libcache/src/libcache/config.py +++ b/libs/libcache/src/libcache/config.py @@ -7,0 +8,3 @@ from environs import Env +from libcache.asset import init_assets_dir +from libcache.simple_cache import connect_to_database + @@ -10 +13,2 @@ class CacheConfig: - assets_directory: Optional[str] + _assets_directory: Optional[str] + assets_directory: str @@ -17 +21 @@ class CacheConfig: - self.assets_directory = env.str(name="ASSETS_DIRECTORY", default=None) + self._assets_directory = env.str(name="ASSETS_DIRECTORY", default=None) @@ -19,0 +24,5 @@ class CacheConfig: + self.setup() + + def setup(self): + connect_to_database(database=self.mongo_database, host=self.mongo_url) + self.assets_directory = init_assets_dir(assets_directory=self._assets_directory) diff --git a/libs/libcache/src/libcache/simple_cache.py b/libs/libcache/src/libcache/simple_cache.py index 0f31b86b..9a2e1c87 100644 --- a/libs/libcache/src/libcache/simple_cache.py +++ b/libs/libcache/src/libcache/simple_cache.py @@ -4 +3,0 @@ -import logging @@ -35,3 +33,0 @@ QuerySet.__class_getitem__ = types.MethodType(no_op, QuerySet) -logger = logging.getLogger(__name__) - - @@ -46 +42 @@ class QuerySetManager(Generic[U]): -def connect_to_cache(database, host) -> None: +def connect_to_database(database: str, 
host: str) -> None: diff --git a/libs/libcache/tests/conftest.py b/libs/libcache/tests/conftest.py index e6b69f1f..27549e33 100644 --- a/libs/libcache/tests/conftest.py +++ b/libs/libcache/tests/conftest.py @@ -18 +18 @@ def monkeypatch_session(): -@fixture(scope="session") +@fixture(scope="session", autouse=True) diff --git a/libs/libcache/tests/test_simple_cache.py b/libs/libcache/tests/test_simple_cache.py index 661da294..69abc590 100644 --- a/libs/libcache/tests/test_simple_cache.py +++ b/libs/libcache/tests/test_simple_cache.py @@ -10 +9,0 @@ from pymongo.errors import DocumentTooLarge -from libcache.config import CacheConfig @@ -16 +14,0 @@ from libcache.simple_cache import ( - connect_to_cache, @@ -36,5 +33,0 @@ from libcache.simple_cache import ( [email protected](autouse=True, scope="module") -def client(cache_config: CacheConfig) -> None: - connect_to_cache(database=cache_config.mongo_database, host=cache_config.mongo_url) - - diff --git a/libs/libcommon/README.md b/libs/libcommon/README.md index b75af640..b5e9f742 100644 --- a/libs/libcommon/README.md +++ b/libs/libcommon/README.md @@ -3 +3 @@ -A Python library with common code (configuration, utils, logger, exceptions) used by the services and the workers +A Python library with common code (configuration, utils, logging, exceptions) used by the services and the workers diff --git a/libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl new file mode 100644 index 00000000..5517358f Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.3.2.tar.gz b/libs/libcommon/dist/libcommon-0.3.2.tar.gz new file mode 100644 index 00000000..5e78bfef Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.2.tar.gz differ diff --git a/libs/libcommon/pyproject.toml b/libs/libcommon/pyproject.toml index 627c64e7..00279f56 100644 --- a/libs/libcommon/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -5 +5 @@ name = "libcommon" -version = "0.3.1" +version = "0.3.2" diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py index fb70f744..75aeda4d 100644 --- a/libs/libcommon/src/libcommon/config.py +++ b/libs/libcommon/src/libcommon/config.py @@ -7,0 +8,2 @@ from environs import Env +from libcommon.log import init_logging + @@ -22,0 +25,4 @@ class CommonConfig: + self.setup() + + def setup(self): + init_logging(self.log_level) diff --git a/libs/libcommon/src/libcommon/log.py b/libs/libcommon/src/libcommon/log.py new file mode 100644 index 00000000..c531c5dd --- /dev/null +++ b/libs/libcommon/src/libcommon/log.py @@ -0,0 +1,9 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import logging + + +def init_logging(log_level: int = logging.INFO) -> None: + logging.basicConfig(level=log_level, format="%(levelname)s: %(asctime)s - %(name)s - %(message)s") + logging.debug(f"Log level set to: {logging.getLevelName(logging.getLogger().getEffectiveLevel())}") diff --git a/libs/libcommon/src/libcommon/logger.py b/libs/libcommon/src/libcommon/logger.py deleted file mode 100644 index 2b7ee030..00000000 --- a/libs/libcommon/src/libcommon/logger.py +++ /dev/null @@ -1,17 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -import logging - - -def init_logger(log_level: int = logging.INFO, name: str = "datasets_server") -> None: - logger = logging.getLogger(name) - logger.setLevel(log_level) - - formatter = logging.Formatter("%(levelname)s: %(asctime)s - %(name)s - %(message)s") - - logHandler = logging.StreamHandler() - logHandler.setFormatter(formatter) - logger.addHandler(logHandler) - - logger.debug(f"Log level set to: {logging.getLevelName(logger.getEffectiveLevel())}") diff --git a/libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl new file mode 100644 index 00000000..7a905795 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.2.tar.gz b/libs/libqueue/dist/libqueue-0.4.2.tar.gz new file mode 100644 index 00000000..b4baf64d Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.2.tar.gz differ diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index 931ee49f..993063a6 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -5 +5 @@ name = "libqueue" -version = "0.4.1" +version = "0.4.2" diff --git a/libs/libqueue/src/libqueue/config.py b/libs/libqueue/src/libqueue/config.py index b10d6f12..45056a77 100644 --- a/libs/libqueue/src/libqueue/config.py +++ b/libs/libqueue/src/libqueue/config.py @@ -5,0 +6,2 @@ from environs import Env +from libqueue.queue import connect_to_database + @@ -23,0 +26,4 @@ class QueueConfig: + self.setup() + + def setup(self): + connect_to_database(database=self.mongo_database, host=self.mongo_url) diff --git a/libs/libqueue/src/libqueue/queue.py b/libs/libqueue/src/libqueue/queue.py index 4139fd85..b23e200b 100644 --- a/libs/libqueue/src/libqueue/queue.py +++ b/libs/libqueue/src/libqueue/queue.py @@ -34,2 +33,0 @@ class QuerySetManager(Generic[U]): -logger = logging.getLogger(__name__) - @@ -81,2 +79,2 @@ def get_datetime() -> datetime: -def connect_to_queue(database, host) -> None: - connect(database, alias="queue", host=host) +def connect_to_database(database: str, host: str) -> None: + connect(db=database, alias="queue", host=host) @@ -265 +263 @@ class Queue: - logger.error(f"job {job_id} does not exist. Aborting.") + logging.error(f"job {job_id} does not exist. Aborting.") @@ -268 +266 @@ class Queue: - logger.warning( + logging.warning( @@ -272 +270 @@ class Queue: - logger.warning(f"job {job.to_id()} has a non-empty finished_at field. Force finishing anyway.") + logging.warning(f"job {job.to_id()} has a non-empty finished_at field. Force finishing anyway.") @@ -274 +272 @@ class Queue: - logger.warning(f"job {job.to_id()} has an empty started_at field. Force finishing anyway.") + logging.warning(f"job {job.to_id()} has an empty started_at field. 
Force finishing anyway.") diff --git a/libs/libqueue/src/libqueue/worker.py b/libs/libqueue/src/libqueue/worker.py index 802eb866..383e2723 100644 --- a/libs/libqueue/src/libqueue/worker.py +++ b/libs/libqueue/src/libqueue/worker.py @@ -15,2 +14,0 @@ from libqueue.queue import EmptyQueue, Queue -logger = logging.getLogger(__name__) - @@ -37 +35 @@ class Worker(ABC): - logger.info( + logging.info( @@ -49 +47 @@ class Worker(ABC): - logger.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.queue_config.max_load_pct}%") + logging.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.queue_config.max_load_pct}%") @@ -56 +54 @@ class Worker(ABC): - logger.debug(f"sleep during {duration:.2f} seconds") + logging.debug(f"sleep during {duration:.2f} seconds") @@ -68 +66 @@ class Worker(ABC): - logger.critical(f"quit due to an uncaught error while processing the job: {e}") + logging.critical(f"quit due to an uncaught error while processing the job: {e}") @@ -72 +70 @@ class Worker(ABC): - logger.debug("try to process a job") + logging.debug("try to process a job") @@ -77 +75 @@ class Worker(ABC): - logger.debug(f"job assigned: {job_id} for {parameters_for_log}") + logging.debug(f"job assigned: {job_id} for {parameters_for_log}") @@ -79 +77 @@ class Worker(ABC): - logger.debug("no job in the queue") + logging.debug("no job in the queue") @@ -83 +81 @@ class Worker(ABC): - logger.info(f"compute {parameters_for_log}") + logging.info(f"compute {parameters_for_log}") @@ -92 +90 @@ class Worker(ABC): - logger.debug(f"job finished with {result}: {job_id} for {parameters_for_log}") + logging.debug(f"job finished with {result}: {job_id} for {parameters_for_log}") diff --git a/libs/libqueue/tests/conftest.py b/libs/libqueue/tests/conftest.py index 4e8c8ff0..f38337bc 100644 --- a/libs/libqueue/tests/conftest.py +++ b/libs/libqueue/tests/conftest.py @@ -18 +18 @@ def monkeypatch_session(): -@fixture(scope="session") +@fixture(scope="session", autouse=True) diff --git a/libs/libqueue/tests/test_queue.py b/libs/libqueue/tests/test_queue.py index 73a3a17e..3875ba65 100644 --- a/libs/libqueue/tests/test_queue.py +++ b/libs/libqueue/tests/test_queue.py @@ -8 +7,0 @@ import pytest -from libqueue.config import QueueConfig @@ -15 +13,0 @@ from libqueue.queue import ( - connect_to_queue, @@ -20,5 +17,0 @@ from libqueue.queue import ( [email protected](autouse=True, scope="module") -def client(queue_config: QueueConfig) -> None: - connect_to_queue(database=queue_config.mongo_database, host=queue_config.mongo_url) - - diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index b905b0db..56bc11f1 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -307 +307 @@ name = "libcache" -version = "0.3.0" +version = "0.3.1" @@ -322 +322 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" @@ -326 +326 @@ name = "libcommon" -version = "0.3.1" +version = "0.3.2" @@ -338 +338 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl" @@ -342 +342 @@ name = "libqueue" -version = "0.4.1" +version = "0.4.2" @@ -357 +357 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" @@ -862 +862 @@ python-versions = "3.9.6" -content-hash = "84c8c8207b78423217d0bded3a27686d73ba83c7797afce314e96214e4dbd3e0" 
+content-hash = "3b9dd1abc61cff1ada376c6fd38e78f0a500f63181966e98e0c7a98994573459" @@ -923 +923 @@ libcache = [ - {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, + {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, @@ -926 +926 @@ libcommon = [ - {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, + {file = "libcommon-0.3.2-py3-none-any.whl", hash = "sha256:774292c9ea2d9ab50c4bf2a3eb212bfaf924096a4279ecf117b70618702d978d"}, @@ -929 +929 @@ libqueue = [ - {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, + {file = "libqueue-0.4.2-py3-none-any.whl", hash = "sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index 2833d178..184c20a5 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -11,3 +11,3 @@ huggingface-hub = "^0.8.1" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 7b5828b1..d1decd65 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -5,3 +4,0 @@ import uvicorn # type: ignore -from libcache.simple_cache import connect_to_cache -from libcommon.logger import init_logger -from libqueue.queue import connect_to_queue @@ -24,3 +20,0 @@ def create_app() -> Starlette: - init_logger(app_config.common.log_level) - connect_to_cache(database=app_config.cache.mongo_database, host=app_config.cache.mongo_url) - connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) diff --git a/services/admin/src/admin/config.py b/services/admin/src/admin/config.py index ad5b6a4f..82250268 100644 --- a/services/admin/src/admin/config.py +++ b/services/admin/src/admin/config.py @@ -53 +53 @@ class AppConfig: - self.cache = CacheConfig() + # First process the common configuration to setup the logging @@ -54,0 +55 @@ class AppConfig: + self.cache = CacheConfig() diff --git a/services/admin/src/admin/routes/cache_reports.py b/services/admin/src/admin/routes/cache_reports.py index 3737eeda..849b8038 100644 --- a/services/admin/src/admin/routes/cache_reports.py +++ b/services/admin/src/admin/routes/cache_reports.py @@ -27,3 +26,0 @@ from admin.utils import ( -logger = logging.getLogger(__name__) - - @@ -52 +49 @@ def create_cache_reports_endpoint( - logger.info(f"/cache-reports/{endpoint}, cursor={cursor}") + logging.info(f"/cache-reports/{endpoint}, cursor={cursor}") diff --git a/services/admin/src/admin/routes/healthcheck.py b/services/admin/src/admin/routes/healthcheck.py index 5ea61a2c..d5b66889 100644 --- a/services/admin/src/admin/routes/healthcheck.py +++ 
b/services/admin/src/admin/routes/healthcheck.py @@ -9,2 +8,0 @@ from starlette.responses import PlainTextResponse, Response -logger = logging.getLogger(__name__) - @@ -13 +11 @@ async def healthcheck_endpoint(_: Request) -> Response: - logger.info("/healthcheck") + logging.info("/healthcheck") diff --git a/services/admin/src/admin/routes/pending_jobs.py b/services/admin/src/admin/routes/pending_jobs.py index cba8a573..51f1a6f2 100644 --- a/services/admin/src/admin/routes/pending_jobs.py +++ b/services/admin/src/admin/routes/pending_jobs.py @@ -21,2 +20,0 @@ from admin.utils import ( -logger = logging.getLogger(__name__) - @@ -31 +29 @@ def create_pending_jobs_endpoint( - logger.info("/pending-jobs") + logging.info("/pending-jobs") diff --git a/services/admin/src/admin/scripts/cancel_jobs_first_rows.py b/services/admin/src/admin/scripts/cancel_jobs_first_rows.py index b8b844fb..88866722 100644 --- a/services/admin/src/admin/scripts/cancel_jobs_first_rows.py +++ b/services/admin/src/admin/scripts/cancel_jobs_first_rows.py @@ -6,2 +6 @@ import logging -from libcommon.logger import init_logger -from libqueue.queue import Queue, connect_to_queue +from libqueue.queue import Queue @@ -14,3 +12,0 @@ if __name__ == "__main__": - init_logger(app_config.common.log_level, "cancel_jobs_first_rows") - logger = logging.getLogger("cancel_jobs_first_rows") - connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) @@ -18 +14 @@ if __name__ == "__main__": - logger.info("all the started jobs in the first_rows/ queue have been cancelled and re-enqueued") + logging.info("all the started jobs in the first_rows/ queue have been cancelled and re-enqueued") diff --git a/services/admin/src/admin/scripts/cancel_jobs_splits.py b/services/admin/src/admin/scripts/cancel_jobs_splits.py index 84781418..ae497b10 100644 --- a/services/admin/src/admin/scripts/cancel_jobs_splits.py +++ b/services/admin/src/admin/scripts/cancel_jobs_splits.py @@ -6,2 +6 @@ import logging -from libcommon.logger import init_logger -from libqueue.queue import Queue, connect_to_queue +from libqueue.queue import Queue @@ -14,3 +12,0 @@ if __name__ == "__main__": - init_logger(app_config.common.log_level, "cancel_jobs_splits") - logger = logging.getLogger("cancel_jobs_splits") - connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) @@ -18 +14 @@ if __name__ == "__main__": - logger.info("all the started jobs in the splits/ queue have been cancelled and re-enqueued") + logging.info("all the started jobs in the splits/ queue have been cancelled and re-enqueued") diff --git a/services/admin/src/admin/scripts/refresh_cache.py b/services/admin/src/admin/scripts/refresh_cache.py index ff15aff1..dd69d734 100644 --- a/services/admin/src/admin/scripts/refresh_cache.py +++ b/services/admin/src/admin/scripts/refresh_cache.py @@ -8,2 +8 @@ from huggingface_hub.hf_api import HfApi # type: ignore -from libcommon.logger import init_logger -from libqueue.queue import Queue, connect_to_queue +from libqueue.queue import Queue @@ -20 +18,0 @@ def refresh_datasets_cache(dataset_names: List[str]) -> None: - logger = logging.getLogger("refresh_cache") @@ -25 +23 @@ def refresh_datasets_cache(dataset_names: List[str]) -> None: - logger.info(f"added a job to refresh '{dataset_name}'") + logging.info(f"added a job to refresh '{dataset_name}'") @@ -30,3 +27,0 @@ if __name__ == "__main__": - init_logger(app_config.common.log_level, "refresh_cache") - logger = logging.getLogger("refresh_cache") - 
connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) @@ -34 +29 @@ if __name__ == "__main__": - logger.info("all the datasets of the Hub have been added to the queue to refresh the cache") + logging.info("all the datasets of the Hub have been added to the queue to refresh the cache") diff --git a/services/admin/src/admin/scripts/refresh_cache_canonical.py b/services/admin/src/admin/scripts/refresh_cache_canonical.py index 2283a0b1..d08d885c 100644 --- a/services/admin/src/admin/scripts/refresh_cache_canonical.py +++ b/services/admin/src/admin/scripts/refresh_cache_canonical.py @@ -7,2 +6,0 @@ from huggingface_hub.hf_api import HfApi # type: ignore -from libcommon.logger import init_logger -from libqueue.queue import connect_to_queue @@ -20,3 +17,0 @@ if __name__ == "__main__": - init_logger(app_config.common.log_level, "refresh_cache_canonical") - logger = logging.getLogger("refresh_cache_canonical") - connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) @@ -24 +19 @@ if __name__ == "__main__": - logger.info("all the canonical datasets of the Hub have been added to the queue to refresh the cache") + logging.info("all the canonical datasets of the Hub have been added to the queue to refresh the cache") diff --git a/services/admin/src/admin/scripts/refresh_cache_errors.py b/services/admin/src/admin/scripts/refresh_cache_errors.py index ae5f3a26..6e2c3556 100644 --- a/services/admin/src/admin/scripts/refresh_cache_errors.py +++ b/services/admin/src/admin/scripts/refresh_cache_errors.py @@ -6,3 +6 @@ import logging -from libcache.simple_cache import connect_to_cache, get_datasets_with_some_error -from libcommon.logger import init_logger -from libqueue.queue import connect_to_queue +from libcache.simple_cache import get_datasets_with_some_error @@ -15,4 +12,0 @@ if __name__ == "__main__": - init_logger(app_config.common.log_level, "refresh_cache_canonical") - logger = logging.getLogger("refresh_cache_canonical") - connect_to_cache(database=app_config.cache.mongo_database, host=app_config.cache.mongo_url) - connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) @@ -20 +14 @@ if __name__ == "__main__": - logger.info("all the datasets with some error in the cache have been added to the queue to be refreshed") + logging.info("all the datasets with some error in the cache have been added to the queue to be refreshed") diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 504eeda4..3968b09f 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -323 +323 @@ name = "libcache" -version = "0.3.0" +version = "0.3.1" @@ -338 +338 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" @@ -342 +342 @@ name = "libcommon" -version = "0.3.1" +version = "0.3.2" @@ -354 +354 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl" @@ -358 +358 @@ name = "libqueue" -version = "0.4.1" +version = "0.4.2" @@ -373 +373 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" @@ -904 +904 @@ python-versions = "3.9.6" -content-hash = "0f241acb38c4931b5f2e9ae27adfff99e98536f4fc5c005b0afe673fbf95f495" +content-hash = "8ef5a2fce9713c54622d086aafcc10d434fc8ec13430d22a715845c4206b8f19" @@ -963 +963 @@ 
libcache = [ - {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, + {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, @@ -966 +966 @@ libcommon = [ - {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, + {file = "libcommon-0.3.2-py3-none-any.whl", hash = "sha256:774292c9ea2d9ab50c4bf2a3eb212bfaf924096a4279ecf117b70618702d978d"}, @@ -969 +969 @@ libqueue = [ - {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, + {file = "libqueue-0.4.2-py3-none-any.whl", hash = "sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index dc0a7a3d..a8156b8d 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -11,3 +11,3 @@ jsonschema = "^4.16.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } diff --git a/services/api/src/api/app.py b/services/api/src/api/app.py index 67bff8ef..6fde2b09 100644 --- a/services/api/src/api/app.py +++ b/services/api/src/api/app.py @@ -7,4 +6,0 @@ import uvicorn # type: ignore -from libcache.asset import init_assets_dir, show_assets_dir -from libcache.simple_cache import connect_to_cache -from libcommon.logger import init_logger -from libqueue.queue import connect_to_queue @@ -30,4 +25,0 @@ def create_app() -> Starlette: - init_logger(app_config.common.log_level) - connect_to_cache(database=app_config.cache.mongo_database, host=app_config.cache.mongo_url) - connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) - show_assets_dir(assets_directory=app_config.cache.assets_directory) @@ -88,3 +80 @@ def create_app() -> Starlette: - app=StaticFiles( - directory=init_assets_dir(assets_directory=app_config.cache.assets_directory), check_dir=True - ), + app=StaticFiles(directory=app_config.cache.assets_directory, check_dir=True), diff --git a/services/api/src/api/config.py b/services/api/src/api/config.py index a18ce0bc..d35b6b5b 100644 --- a/services/api/src/api/config.py +++ b/services/api/src/api/config.py @@ -50 +50 @@ class AppConfig: - self.cache = CacheConfig() + # First process the common configuration to setup the logging @@ -51,0 +52 @@ class AppConfig: + self.cache = CacheConfig() diff --git a/services/api/src/api/dataset.py b/services/api/src/api/dataset.py index 28561dd8..3c961670 100644 --- a/services/api/src/api/dataset.py +++ b/services/api/src/api/dataset.py @@ -22,2 +21,0 @@ from api.utils import JobType -logger = logging.getLogger(__name__) - @@ -54 +52 @@ def update(dataset: str) -> None: - logger.debug(f"webhook: refresh {dataset}") + logging.debug(f"webhook: refresh {dataset}") @@ -61 +59 @@ def delete(dataset: str) -> None: - logger.debug(f"webhook: delete {dataset}") + 
logging.debug(f"webhook: delete {dataset}") diff --git a/services/api/src/api/routes/first_rows.py b/services/api/src/api/routes/first_rows.py index 353d5112..1a5b693d 100644 --- a/services/api/src/api/routes/first_rows.py +++ b/services/api/src/api/routes/first_rows.py @@ -27,2 +26,0 @@ from api.utils import ( -logger = logging.getLogger(__name__) - @@ -42 +40 @@ def create_first_rows_endpoint( - logger.info(f"/first-rows, dataset={dataset}, config={config}, split={split}") + logging.info(f"/first-rows, dataset={dataset}, config={config}, split={split}") diff --git a/services/api/src/api/routes/healthcheck.py b/services/api/src/api/routes/healthcheck.py index 5ea61a2c..d5b66889 100644 --- a/services/api/src/api/routes/healthcheck.py +++ b/services/api/src/api/routes/healthcheck.py @@ -9,2 +8,0 @@ from starlette.responses import PlainTextResponse, Response -logger = logging.getLogger(__name__) - @@ -13 +11 @@ async def healthcheck_endpoint(_: Request) -> Response: - logger.info("/healthcheck") + logging.info("/healthcheck") diff --git a/services/api/src/api/routes/splits.py b/services/api/src/api/routes/splits.py index eb9e3a21..0a441bac 100644 --- a/services/api/src/api/routes/splits.py +++ b/services/api/src/api/routes/splits.py @@ -27,2 +26,0 @@ from api.utils import ( -logger = logging.getLogger(__name__) - @@ -40 +38 @@ def create_splits_endpoint( - logger.info(f"/splits, dataset={dataset}") + logging.info(f"/splits, dataset={dataset}") diff --git a/services/api/src/api/routes/valid.py b/services/api/src/api/routes/valid.py index d892d595..1745e323 100644 --- a/services/api/src/api/routes/valid.py +++ b/services/api/src/api/routes/valid.py @@ -22,2 +21,0 @@ from api.utils import ( -logger = logging.getLogger(__name__) - @@ -27 +25 @@ async def valid_endpoint(_: Request) -> Response: - logger.info("/valid") + logging.info("/valid") @@ -40 +38 @@ def create_is_valid_endpoint( - logger.info(f"/is-valid, dataset={dataset_name}") + logging.info(f"/is-valid, dataset={dataset_name}") diff --git a/services/api/src/api/routes/webhook.py b/services/api/src/api/routes/webhook.py index f449e1df..416149b3 100644 --- a/services/api/src/api/routes/webhook.py +++ b/services/api/src/api/routes/webhook.py @@ -14,3 +13,0 @@ from api.utils import Endpoint, get_response -logger = logging.getLogger(__name__) - - @@ -89 +86 @@ def create_webhook_endpoint(hf_endpoint: str, hf_token: Optional[str] = None) -> - logger.info(f"/webhook: {json}") + logging.info(f"/webhook: {json}") diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 3762689a..e19aabc2 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -823 +823 @@ name = "libcache" -version = "0.3.0" +version = "0.3.1" @@ -838 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" @@ -850 +850 @@ name = "libcommon" -version = "0.3.1" +version = "0.3.2" @@ -862 +862 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl" @@ -866 +866 @@ name = "libqueue" -version = "0.4.1" +version = "0.4.2" @@ -881 +881 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" @@ -2293 +2293 @@ python-versions = "3.9.6" -content-hash = "7205adaa92829e625bdb89fa7a67c192aaaadd242cca0d45cb2cf5aed455631d" +content-hash = 
"2aa2e60943104aea6952a60cdb35c4b05e790723bf35597739125c972afaa4b6" @@ -2623 +2623 @@ libcache = [ - {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, + {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, @@ -2627 +2627 @@ libcommon = [ - {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, + {file = "libcommon-0.3.2-py3-none-any.whl", hash = "sha256:774292c9ea2d9ab50c4bf2a3eb212bfaf924096a4279ecf117b70618702d978d"}, @@ -2630 +2630 @@ libqueue = [ - {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, + {file = "libqueue-0.4.2-py3-none-any.whl", hash = "sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index 0d137d53..f27fc598 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -19,3 +19,3 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/src/first_rows/asset.py b/workers/first_rows/src/first_rows/asset.py index fd63269e..dc652da0 100644 --- a/workers/first_rows/src/first_rows/asset.py +++ b/workers/first_rows/src/first_rows/asset.py @@ -4 +3,0 @@ -import logging @@ -7 +6 @@ from pathlib import Path -from typing import List, Optional, Tuple, TypedDict +from typing import List, Tuple, TypedDict @@ -10 +8,0 @@ import soundfile # type:ignore -from libcache.asset import init_assets_dir @@ -15,2 +12,0 @@ from pydub import AudioSegment # type:ignore -logger = logging.getLogger(__name__) - @@ -22 +18 @@ def create_asset_dir( - dataset: str, config: str, split: str, row_idx: int, column: str, assets_directory: Optional[str] + dataset: str, config: str, split: str, row_idx: int, column: str, assets_directory: str @@ -24,2 +20 @@ def create_asset_dir( - assets_dir = init_assets_dir(assets_directory) - dir_path = Path(assets_dir).resolve() / dataset / DATASET_SEPARATOR / config / split / str(row_idx) / column + dir_path = Path(assets_directory).resolve() / dataset / DATASET_SEPARATOR / config / split / str(row_idx) / column @@ -46 +41 @@ def create_image_file( - assets_directory: Optional[str], + assets_directory: str, @@ -75 +70 @@ def create_audio_files( - assets_directory: Optional[str], + assets_directory: str, diff --git a/workers/first_rows/src/first_rows/config.py b/workers/first_rows/src/first_rows/config.py index b2e60541..a885cea5 100644 --- a/workers/first_rows/src/first_rows/config.py +++ b/workers/first_rows/src/first_rows/config.py @@ -36 +36 @@ class WorkerConfig: - self.cache = CacheConfig() + # First process the common configuration to setup the logging @@ -37,0 +38 @@ class WorkerConfig: + self.cache = CacheConfig() diff --git 
a/workers/first_rows/src/first_rows/features.py b/workers/first_rows/src/first_rows/features.py index 0008df82..b32d70dc 100644 --- a/workers/first_rows/src/first_rows/features.py +++ b/workers/first_rows/src/first_rows/features.py @@ -5 +5 @@ import json -from typing import Any, List, Optional, Union +from typing import Any, List, Union @@ -52 +52 @@ def image( - assets_directory: Optional[str], + assets_directory: str, @@ -89 +89 @@ def audio( - assets_directory: Optional[str], + assets_directory: str, @@ -127 +127 @@ def get_cell_value( - assets_directory: Optional[str], + assets_directory: str, diff --git a/workers/first_rows/src/first_rows/main.py b/workers/first_rows/src/first_rows/main.py index 6c89f15f..03828dec 100644 --- a/workers/first_rows/src/first_rows/main.py +++ b/workers/first_rows/src/first_rows/main.py @@ -4,5 +3,0 @@ -from libcache.asset import show_assets_dir -from libcache.simple_cache import connect_to_cache -from libcommon.logger import init_logger -from libqueue.queue import connect_to_queue - @@ -14,5 +8,0 @@ if __name__ == "__main__": - init_logger(worker_config.common.log_level) - connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) - connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.cache.mongo_url) - show_assets_dir(assets_directory=worker_config.cache.assets_directory) - diff --git a/workers/first_rows/src/first_rows/response.py b/workers/first_rows/src/first_rows/response.py index f5b9e8fb..4f90c926 100644 --- a/workers/first_rows/src/first_rows/response.py +++ b/workers/first_rows/src/first_rows/response.py @@ -37,3 +36,0 @@ from first_rows.utils import ( -logger = logging.getLogger(__name__) - - @@ -63 +60 @@ class FirstRowsResponse(TypedDict): -@retry(logger=logger) +@retry() @@ -87 +84 @@ def get_rows( - logger.debug(f"all the rows in the split have been fetched ({len(rows_plus_one)})") + logging.debug(f"all the rows in the split have been fetched ({len(rows_plus_one)})") @@ -89 +86 @@ def get_rows( - logger.debug(f"the rows in the split have been truncated ({rows_max_number} rows)") + logging.debug(f"the rows in the split have been truncated ({rows_max_number} rows)") @@ -136 +133 @@ def truncate_row_items(row_items: List[RowItem], min_cell_bytes: int, rows_max_b - logger.debug(f"the size of the rows is now ({rows_bytes}) after truncating row idx={row_idx}") + logging.debug(f"the size of the rows is now ({rows_bytes}) after truncating row idx={row_idx}") @@ -173 +170 @@ def create_truncated_row_items( - logger.debug( + logging.debug( @@ -185 +182 @@ def create_truncated_row_items( - logger.debug( + logging.debug( @@ -201 +198 @@ def transform_rows( - assets_directory: Optional[str], + assets_directory: str, @@ -248 +245 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, - logger.info(f"get dataset '{dataset}' split full names") + logging.info(f"get dataset '{dataset}' split full names") @@ -268 +265 @@ def get_first_rows_response( - assets_directory: Optional[str], + assets_directory: str, @@ -318 +315 @@ def get_first_rows_response( - logger.info(f"get first-rows for dataset={dataset} config={config} split={split}") + logging.info(f"get first-rows for dataset={dataset} config={config} split={split}") diff --git a/workers/first_rows/src/first_rows/utils.py b/workers/first_rows/src/first_rows/utils.py index afc2e919..fa89ffdb 100644 --- a/workers/first_rows/src/first_rows/utils.py +++ b/workers/first_rows/src/first_rows/utils.py @@ -4,0 +5 @@ 
import functools +import logging @@ -8 +8,0 @@ from http import HTTPStatus -from logging import Logger @@ -120 +120 @@ class UnexpectedError(WorkerCustomError): -def retry(logger: Logger): +def retry(): @@ -134 +134 @@ def retry(logger: Logger): - logger.info(f"Sleep during {duration} seconds to preventively mitigate rate limiting.") + logging.info(f"Sleep during {duration} seconds to preventively mitigate rate limiting.") @@ -138 +138 @@ def retry(logger: Logger): - logger.info("Got a ConnectionError, possibly due to rate limiting. Let's retry.") + logging.info("Got a ConnectionError, possibly due to rate limiting. Let's retry.") diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/first_rows/src/first_rows/worker.py index bba181e3..ec35b4e2 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/first_rows/src/first_rows/worker.py @@ -22,2 +21,0 @@ from first_rows.utils import ( -logger = logging.getLogger(__name__) - @@ -67 +65 @@ class FirstRowsWorker(Worker): - logger.debug(f"dataset={dataset} config={config} split={split} is valid, cache updated") + logging.debug(f"dataset={dataset} config={config} split={split} is valid, cache updated") @@ -70 +68 @@ class FirstRowsWorker(Worker): - logger.debug( + logging.debug( @@ -84 +82 @@ class FirstRowsWorker(Worker): - logger.debug( + logging.debug( @@ -99 +97 @@ class FirstRowsWorker(Worker): - logger.debug( + logging.debug( diff --git a/workers/first_rows/tests/conftest.py b/workers/first_rows/tests/conftest.py index 342de477..70c7ab83 100644 --- a/workers/first_rows/tests/conftest.py +++ b/workers/first_rows/tests/conftest.py @@ -27 +27 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): -@fixture(scope="session") +@fixture(scope="session", autouse=True) diff --git a/workers/first_rows/tests/test_worker.py b/workers/first_rows/tests/test_worker.py index d8d2aff8..dba55f4f 100644 --- a/workers/first_rows/tests/test_worker.py +++ b/workers/first_rows/tests/test_worker.py @@ -10,2 +10,2 @@ from libcache.simple_cache import _clean_database as _clean_cache_database -from libcache.simple_cache import connect_to_cache, get_first_rows_response -from libqueue.queue import _clean_queue_database, connect_to_queue +from libcache.simple_cache import get_first_rows_response +from libqueue.queue import _clean_queue_database @@ -19,6 +18,0 @@ from .utils import get_default_config_split [email protected](autouse=True, scope="module") -def client(worker_config: WorkerConfig) -> None: - connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) - connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.queue.mongo_url) - - diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 4481f671..1b8834e4 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -823 +823 @@ name = "libcache" -version = "0.3.0" +version = "0.3.1" @@ -838 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl" @@ -850 +850 @@ name = "libcommon" -version = "0.3.1" +version = "0.3.2" @@ -862 +862 @@ type = "file" -url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl" @@ -866 +866 @@ name = "libqueue" -version = "0.4.1" +version = "0.4.2" @@ -881 +881 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" +url = 
"../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl" @@ -2293 +2293 @@ python-versions = "3.9.6" -content-hash = "7205adaa92829e625bdb89fa7a67c192aaaadd242cca0d45cb2cf5aed455631d" +content-hash = "2aa2e60943104aea6952a60cdb35c4b05e790723bf35597739125c972afaa4b6" @@ -2623 +2623 @@ libcache = [ - {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, + {file = "libcache-0.3.1-py3-none-any.whl", hash = "sha256:b2e6a479961d8f5ac408ee0bd9bd4e826a9f2cbc2df973fd26b77a9263a98190"}, @@ -2627 +2627 @@ libcommon = [ - {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, + {file = "libcommon-0.3.2-py3-none-any.whl", hash = "sha256:774292c9ea2d9ab50c4bf2a3eb212bfaf924096a4279ecf117b70618702d978d"}, @@ -2630 +2630 @@ libqueue = [ - {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, + {file = "libqueue-0.4.2-py3-none-any.whl", hash = "sha256:9d5627f96ec3cd967ed4e331e4b4c5c125be2e2943fe6b10ddc927905f86c441"}, diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 14676c6f..e12a89f8 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -19,3 +19,3 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } -libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.1-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.2-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.2-py3-none-any.whl", develop = false } diff --git a/workers/splits/src/splits/config.py b/workers/splits/src/splits/config.py index c8d93404..63422109 100644 --- a/workers/splits/src/splits/config.py +++ b/workers/splits/src/splits/config.py @@ -17 +17 @@ class WorkerConfig: - self.cache = CacheConfig() + # First process the common configuration to setup the logging @@ -18,0 +19 @@ class WorkerConfig: + self.cache = CacheConfig() diff --git a/workers/splits/src/splits/main.py b/workers/splits/src/splits/main.py index 978998d6..79527175 100644 --- a/workers/splits/src/splits/main.py +++ b/workers/splits/src/splits/main.py @@ -4,4 +3,0 @@ -from libcache.simple_cache import connect_to_cache -from libcommon.logger import init_logger -from libqueue.queue import connect_to_queue - @@ -13,4 +8,0 @@ if __name__ == "__main__": - init_logger(worker_config.common.log_level) - connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) - connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.cache.mongo_url) - diff --git a/workers/splits/src/splits/response.py b/workers/splits/src/splits/response.py index 623a7a84..f906a454 100644 --- a/workers/splits/src/splits/response.py +++ b/workers/splits/src/splits/response.py @@ -19,2 +18,0 @@ from splits.utils import DatasetNotFoundError, EmptyDatasetError, SplitsNamesErr -logger = logging.getLogger(__name__) - @@ -38 +36 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, - logger.info(f"get dataset '{dataset}' split full names") + logging.info(f"get dataset '{dataset}' split full names") @@ -73 
+71 @@ def get_splits_response( - logger.info(f"get splits for dataset={dataset}") + logging.info(f"get splits for dataset={dataset}") diff --git a/workers/splits/src/splits/worker.py b/workers/splits/src/splits/worker.py index 2b9a8379..b1c2f4f4 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/splits/src/splits/worker.py @@ -24,2 +23,0 @@ from splits.utils import ( -logger = logging.getLogger(__name__) - @@ -50 +48 @@ class SplitsWorker(Worker): - logger.debug(f"dataset={dataset} is valid, cache updated") + logging.debug(f"dataset={dataset} is valid, cache updated") @@ -57 +55 @@ class SplitsWorker(Worker): - logger.debug( + logging.debug( @@ -63 +61 @@ class SplitsWorker(Worker): - logger.debug(f"{len(new_splits)} 'first-rows' jobs added for the splits of dataset={dataset}") + logging.debug(f"{len(new_splits)} 'first-rows' jobs added for the splits of dataset={dataset}") @@ -66 +64 @@ class SplitsWorker(Worker): - logger.debug(f"the dataset={dataset} could not be found, don't update the cache") + logging.debug(f"the dataset={dataset} could not be found, don't update the cache") @@ -76 +74 @@ class SplitsWorker(Worker): - logger.debug(f"splits response for dataset={dataset} had an error, cache updated") + logging.debug(f"splits response for dataset={dataset} had an error, cache updated") @@ -87 +85 @@ class SplitsWorker(Worker): - logger.debug(f"splits response for dataset={dataset} had a server error, cache updated") + logging.debug(f"splits response for dataset={dataset} had a server error, cache updated") diff --git a/workers/splits/tests/conftest.py b/workers/splits/tests/conftest.py index 86ac4418..a8ebe275 100644 --- a/workers/splits/tests/conftest.py +++ b/workers/splits/tests/conftest.py @@ -24 +24 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str): -@fixture(scope="session") +@fixture(scope="session", autouse=True) diff --git a/workers/splits/tests/test_worker.py b/workers/splits/tests/test_worker.py index b8e6c6e5..ab0ba460 100644 --- a/workers/splits/tests/test_worker.py +++ b/workers/splits/tests/test_worker.py @@ -9,2 +9,2 @@ from libcache.simple_cache import _clean_database as _clean_cache_database -from libcache.simple_cache import connect_to_cache, get_splits_response -from libqueue.queue import _clean_queue_database, connect_to_queue +from libcache.simple_cache import get_splits_response +from libqueue.queue import _clean_queue_database @@ -16,6 +15,0 @@ from splits.worker import SplitsWorker [email protected](autouse=True, scope="module") -def client(worker_config: WorkerConfig) -> None: - connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) - connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.queue.mongo_url) - -
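Note: the hunks above drop the per-module `logger = logging.getLogger(__name__)` objects and the `Logger` parameter of `retry`, calling the root `logging` module directly; per the `splits/config.py` hunk, logging is now configured once by the common configuration at startup. A minimal sketch of the resulting decorator shape, assuming an illustrative sleep schedule and the builtin `ConnectionError` (the worker may catch a library-specific error type):

```python
import functools
import logging
import time

SLEEPS = [1, 1, 1, 10, 10]  # illustrative schedule, not the project's values


def retry():
    """Sketch: no injected Logger; the root logging module is used directly,
    since logging is configured once by the common configuration."""

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            last_err: Exception = ConnectionError("no attempts were made")
            for duration in SLEEPS:
                # sleep preemptively before each attempt
                logging.info(f"Sleep during {duration} seconds to preventively mitigate rate limiting.")
                time.sleep(duration)
                try:
                    return func(*args, **kwargs)
                except ConnectionError as err:
                    logging.info("Got a ConnectionError, possibly due to rate limiting. Let's retry.")
                    last_err = err
            raise last_err

        return wrapper

    return decorator
```

With this shape, call sites only need `@retry()` with no logger to thread through, which is exactly what the `-@retry(logger=logger)` → `+@retry()` hunk reflects.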
705f610c44029eec8f7e98529db4550f5254297f
Sylvain Lesage
2022-10-21T09:49:54
feat: 🎸 change the number of pods (#613)
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 36999da5..a4b4fafb 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -154 +154 @@ splits: - replicas: 8 + replicas: 10 @@ -171 +171 @@ firstRows: - replicas: 34 + replicas: 32
de6909fa7699a5d3bcfa847c30f78c2d90f02cd3
Sylvain Lesage
2022-10-21T09:46:54
Manage the environment variables and configuration more robustly (#612)
diff --git a/.github/workflows/_e2e_tests.yml b/.github/workflows/_e2e_tests.yml index 28330ebf..9ce62b88 100644 --- a/.github/workflows/_e2e_tests.yml +++ b/.github/workflows/_e2e_tests.yml @@ -57 +57 @@ jobs: - ROWS_MAX_NUMBER: 4 + QUEUE_SLEEP_TIME: "1" @@ -59,4 +59,3 @@ jobs: - HF_ENDPOINT: "https://hub-ci.huggingface.co" - HF_TOKEN: "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" - IMAGE_ADMIN: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.admin}}" - IMAGE_API: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.api}}" + COMMON_HF_ENDPOINT: "https://hub-ci.huggingface.co" + COMMON_HF_TOKEN: "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" + FIRST_ROWS_MAX_NUMBER: "4" @@ -64,2 +63,4 @@ jobs: - IMAGE_WORKER_SPLITS: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.worker.splits}}" - IMAGE_WORKER_FIRST_ROWS: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.worker.firstRows}}" + IMAGE_ADMIN: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.admin}}" + IMAGE_API: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.services.api}}" + IMAGE_WORKER_SPLITS: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.workers.splits}}" + IMAGE_WORKER_FIRST_ROWS: "${{fromJson(needs.get-config.outputs.dockerConfig).dockerImage.workers.firstRows}}" @@ -73,2 +73,0 @@ jobs: - env: - ROWS_MAX_NUMBER: 4 diff --git a/.github/workflows/_unit-tests-python.yml b/.github/workflows/_unit-tests-python.yml index 08d59a00..ac854547 100644 --- a/.github/workflows/_unit-tests-python.yml +++ b/.github/workflows/_unit-tests-python.yml @@ -53,5 +53,2 @@ jobs: - MONGO_CACHE_DATABASE: datasets_server_cache_test - MONGO_QUEUE_DATABASE: datasets_server_queue_test - MONGO_URL: mongodb://localhost:${{ env.mongo-port }} - HF_TOKEN: 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 - HF_ENDPOINT: https://hub-ci.huggingface.co + CACHE_MONGO_URL: mongodb://localhost:${{ env.mongo-port }} + QUEUE_MONGO_URL: mongodb://localhost:${{ env.mongo-port }} diff --git a/.github/workflows/l-libutils.yml b/.github/workflows/l-libcommon.yml similarity index 74% rename from .github/workflows/l-libutils.yml rename to .github/workflows/l-libcommon.yml index 80b0296c..2a4d54a8 100644 --- a/.github/workflows/l-libutils.yml +++ b/.github/workflows/l-libcommon.yml @@ -4 +4 @@ -name: libs/libutils +name: libs/libcommon @@ -9,2 +9,2 @@ on: - - 'libs/libutils/**' - - '.github/workflows/l-libutils.yml' + - 'libs/libcommon/**' + - '.github/workflows/l-libcommon.yml' @@ -19 +19 @@ jobs: - working-directory: libs/libutils + working-directory: libs/libcommon @@ -23 +23 @@ jobs: - working-directory: libs/libutils + working-directory: libs/libcommon diff --git a/.vscode/monorepo.code-workspace b/.vscode/monorepo.code-workspace index 9b174f8e..5c788f47 100644 --- a/.vscode/monorepo.code-workspace +++ b/.vscode/monorepo.code-workspace @@ -20,2 +20,2 @@ - "name": "libs/libutils", - "path": "../libs/libutils" + "name": "libs/libcommon", + "path": "../libs/libcommon" diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 4bf7407b..9c30e6d7 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -32 +32 @@ If you use pyenv: -cd libs/libutils/ +cd libs/libcommon/ @@ -50 +50 @@ If you use VSCode, it might be useful to use the ["monorepo" workspace](./.vscod -The repository is structured as a monorepo, with Python applications in [services/](./services/) and Python libraries in [libs/](./libs/). 
+The repository is structured as a monorepo, with Python applications in [services/](./services/) and [workers/](./workers/), and Python libraries in [libs/](./libs/). @@ -58 +58 @@ The application is distributed in several components. -The precomputed responses are stored in a Mongo database called "cache" (see [libcache](./libs/libcache)). They are computed by workers ([worker](./services/worker)) which take their jobs from a job queue stored in a Mongo database called "queue" (see [libqueue](./libs/libqueue)), and store the results (error or valid response) into the "cache". +The precomputed responses are stored in a Mongo database called "cache" (see [libcache](./libs/libcache)). They are computed by [workers](./workers) which take their jobs from a job queue stored in a Mongo database called "queue" (see [libqueue](./libs/libqueue)), and store the results (error or valid response) into the "cache". @@ -67 +67 @@ Note that two job queues exist: -Note also that the workers create local files when the dataset contains images or audios. A shared directory (`ASSETS_DIRECTORY`) must therefore be provisioned with sufficient space for the generated files. The `/first-rows` endpoint responses contain URLs to these files, served by the API under the `/assets/` endpoint. +Note also that the workers create local files when the dataset contains images or audios. A shared directory (`COMMON_ASSETS_DIRECTORY`) must therefore be provisioned with sufficient space for the generated files. The `/first-rows` endpoint responses contain URLs to these files, served by the API under the `/assets/` endpoint. diff --git a/Makefile b/Makefile index c0c243fc..116e609f 100644 --- a/Makefile +++ b/Makefile @@ -28 +28 @@ install: - $(MAKE) -C libs/libutils/ install + $(MAKE) -C libs/libcommon/ install @@ -34,2 +34,2 @@ start-from-local-code: - MONGO_PORT=${LOCAL_CODE_MONGO_PORT} PORT_ADMIN=${LOCAL_CODE_PORT_ADMIN} PORT_API=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) down - MONGO_PORT=${LOCAL_CODE_MONGO_PORT} PORT_ADMIN=${LOCAL_CODE_PORT_ADMIN} PORT_API=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) up + MONGO_PORT=${LOCAL_CODE_MONGO_PORT} ADMIN_UVICORN_PORT=${LOCAL_CODE_PORT_ADMIN} API_UVICORN_PORT=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) down + MONGO_PORT=${LOCAL_CODE_MONGO_PORT} ADMIN_UVICORN_PORT=${LOCAL_CODE_PORT_ADMIN} API_UVICORN_PORT=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) up @@ -39 +39 @@ stop-from-local-code: - MONGO_PORT=${LOCAL_CODE_MONGO_PORT} PORT_ADMIN=${LOCAL_CODE_PORT_ADMIN} PORT_API=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) down + MONGO_PORT=${LOCAL_CODE_MONGO_PORT} ADMIN_UVICORN_PORT=${LOCAL_CODE_PORT_ADMIN} API_UVICORN_PORT=${LOCAL_CODE_PORT_API} PORT_REVERSE_PROXY=${LOCAL_CODE_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${LOCAL_CODE_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${LOCAL_CODE_DOCKER_COMPOSE} $(MAKE) down 
@@ -43,2 +43,2 @@ start-from-remote-images: - MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} PORT_ADMIN=${REMOTE_IMAGES_PORT_ADMIN} PORT_API=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) down - MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} PORT_ADMIN=${REMOTE_IMAGES_PORT_ADMIN} PORT_API=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) up + MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} ADMIN_UVICORN_PORT=${REMOTE_IMAGES_PORT_ADMIN} API_UVICORN_PORT=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) down + MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} ADMIN_UVICORN_PORT=${REMOTE_IMAGES_PORT_ADMIN} API_UVICORN_PORT=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) up @@ -48 +48 @@ stop-from-remote-images: - MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} PORT_ADMIN=${REMOTE_IMAGES_PORT_ADMIN} PORT_API=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) down + MONGO_PORT=${REMOTE_IMAGES_MONGO_PORT} ADMIN_UVICORN_PORT=${REMOTE_IMAGES_PORT_ADMIN} API_UVICORN_PORT=${REMOTE_IMAGES_PORT_API} PORT_REVERSE_PROXY=${REMOTE_IMAGES_PORT_REVERSE_PROXY} COMPOSE_PROJECT_NAME=${REMOTE_IMAGES_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${REMOTE_IMAGES_DOCKER_COMPOSE} $(MAKE) down @@ -56 +56 @@ test: - $(MAKE) -C libs/libutils/ test + $(MAKE) -C libs/libcommon/ test @@ -66 +66 @@ coverage: - $(MAKE) -C libs/libutils/ coverage + $(MAKE) -C libs/libcommon/ coverage @@ -80 +80 @@ quality: - $(MAKE) -C libs/libutils/ quality + $(MAKE) -C libs/libcommon/ quality @@ -92 +92 @@ style: - $(MAKE) -C libs/libutils/ style + $(MAKE) -C libs/libcommon/ style diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 25f5a4a7..4c52231d 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -3,2 +2,0 @@ - "admin": "huggingface/datasets-server-services-admin:sha-db1a233", - "api": "huggingface/datasets-server-services-api:sha-db1a233", @@ -6,3 +4,7 @@ - "worker": { - "splits": "huggingface/datasets-server-workers-splits:sha-68b31e3", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-f7cfa4a" + "services": { + "admin": "huggingface/datasets-server-services-admin:sha-ccfbc0c", + "api": "huggingface/datasets-server-services-api:sha-ccfbc0c", + }, + "workers": { + "splits": "huggingface/datasets-server-workers-splits:sha-ccfbc0c", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-ccfbc0c" diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml index 0c52ca0b..81380208 100644 --- a/chart/env/dev.yaml +++ b/chart/env/dev.yaml @@ -4,2 +4 @@ -mongodb: - enabled: true +# --- common parameters --- @@ -7,4 +6 @@ mongodb: -storage: - nfs: - path: "/fsx" - server: "svm-08a37cf73026f0b5c.fs-097afa9688029b62a.fsx.us-east-1.amazonaws.com" +hostname: "datasets-server.us.dev.moon.huggingface.tech" @@ -15,0 +12,5 @@ secrets: +storage: + nfs: + path: "/fsx" + server: 
"svm-08a37cf73026f0b5c.fs-097afa9688029b62a.fsx.us-east-1.amazonaws.com" + @@ -19 +20,16 @@ monitoring: -apiDomain: "datasets-server.us.dev.moon.huggingface.tech" +mongodb: + enabled: true + +common: + # Log level + logLevel: "DEBUG" + +# --- reverse proxy --- + +reverseProxy: + replicas: 1 + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 @@ -33,2 +49 @@ ingress: -reverseProxy: - replicas: 1 +# --- services --- @@ -35,0 +51,2 @@ reverseProxy: +admin: + replicas: 1 @@ -44 +60,0 @@ api: - @@ -51,9 +67 @@ api: -worker: - splits: - replicas: 1 - - resources: - requests: - cpu: 0.01 - limits: - cpu: 1 +# --- workers --- @@ -61,10 +69 @@ worker: - firstRows: - replicas: 1 - - resources: - requests: - cpu: 0.01 - limits: - cpu: 1 - -admin: +splits: @@ -71,0 +71,5 @@ admin: + resources: + requests: + cpu: 0.01 + limits: + cpu: 1 @@ -72,0 +77,2 @@ admin: +firstRows: + replicas: 1 diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml index 9695431f..36999da5 100644 --- a/chart/env/prod.yaml +++ b/chart/env/prod.yaml @@ -43,3 +43,7 @@ -mongodb: - enabled: false - # we use the secret instead to get the mongo URL +# --- common parameters --- + +hostname: "datasets-server.huggingface.co" + +secrets: + hfToken: hf-token + mongoUrl: mongo-url @@ -54,4 +57,0 @@ storage: -secrets: - hfToken: hf-token - mongoUrl: mongo-url - @@ -61 +61,3 @@ monitoring: -apiDomain: "datasets-server.huggingface.co" +mongodb: + # we use the secret instead to get the mongo URL + enabled: false @@ -63,11 +65,5 @@ apiDomain: "datasets-server.huggingface.co" -ingress: - annotations: - alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-1:707930574880:certificate/971187a3-2baa-40e5-bcae-94d6ec55cd24 - alb.ingress.kubernetes.io/load-balancer-name: "hub-datasets-server-prod" - alb.ingress.kubernetes.io/tags: "Env=prod,Project=datasets-server,Terraform=true" - alb.ingress.kubernetes.io/target-node-labels: role-datasets-server=true - alb.ingress.kubernetes.io/healthcheck-path: "/healthcheck" - alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80, "HTTPS": 443}]' - alb.ingress.kubernetes.io/scheme: "internet-facing" - alb.ingress.kubernetes.io/group.name: "datasets-server" - kubernetes.io/ingress.class: "alb" +common: + # Log level + logLevel: "DEBUG" + +# --- reverse proxy --- @@ -75,0 +72,2 @@ reverseProxy: + nodeSelector: + role-datasets-server: "true" @@ -77 +75,7 @@ reverseProxy: - + resources: + requests: + cpu: 1 + memory: "256Mi" + limits: + cpu: 1 + memory: "256Mi" @@ -86,4 +89,0 @@ reverseProxy: - - nodeSelector: - role-datasets-server: "true" - @@ -94,7 +94,11 @@ reverseProxy: - resources: - requests: - cpu: 1 - memory: "256Mi" - limits: - cpu: 1 - memory: "256Mi" +ingress: + annotations: + alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-1:707930574880:certificate/971187a3-2baa-40e5-bcae-94d6ec55cd24 + alb.ingress.kubernetes.io/load-balancer-name: "hub-datasets-server-prod" + alb.ingress.kubernetes.io/tags: "Env=prod,Project=datasets-server,Terraform=true" + alb.ingress.kubernetes.io/target-node-labels: role-datasets-server=true + alb.ingress.kubernetes.io/healthcheck-path: "/healthcheck" + alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80, "HTTPS": 443}]' + alb.ingress.kubernetes.io/scheme: "internet-facing" + alb.ingress.kubernetes.io/group.name: "datasets-server" + kubernetes.io/ingress.class: "alb" @@ -102,2 +106,9 @@ reverseProxy: -api: - replicas: 4 +# --- services --- + +admin: + # Number of reports in /cache-reports/... 
endpoints + cacheReportsNumResults: 1000 + # Number of uvicorn workers for running the application + # (2 x $num_cores) + 1 + # https://docs.gunicorn.org/en/stable/design.html#how-many-workers + uvicornNumWorkers: "9" @@ -107 +118 @@ api: - + replicas: 1 @@ -115,0 +127 @@ api: +api: @@ -119,7 +131 @@ api: - appNumWorkers: 9 - # Log level - logLevel: "DEBUG" - -worker: - splits: - replicas: 8 + uvicornNumWorkers: "9" @@ -127,13 +133,10 @@ worker: - nodeSelector: - role-datasets-server: "true" - - resources: - requests: - cpu: 1 - memory: "8Gi" - limits: - cpu: 2 - memory: "30Gi" - - # Log level - logLevel: "DEBUG" + nodeSelector: + role-datasets-server: "true" + replicas: 4 + resources: + requests: + cpu: 4 + memory: "512Mi" + limits: + cpu: 4 + memory: "4Gi" @@ -141,2 +144 @@ worker: - firstRows: - replicas: 34 +# --- workers --- @@ -144,2 +146,5 @@ worker: - nodeSelector: - role-datasets-server: "true" +splits: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same dataset + maxJobsPerDataset: 4 @@ -147,7 +152,10 @@ worker: - resources: - requests: - cpu: 1 - memory: "8Gi" - limits: - cpu: 2 - memory: "30Gi" + nodeSelector: + role-datasets-server: "true" + replicas: 8 + resources: + requests: + cpu: 1 + memory: "8Gi" + limits: + cpu: 2 + memory: "30Gi" @@ -155,2 +163,3 @@ worker: - # Log level - logLevel: "DEBUG" +firstRows: + # override the common queue parameters + queue: @@ -160,4 +168,0 @@ worker: - -admin: - replicas: 1 - @@ -166 +171 @@ admin: - + replicas: 34 @@ -169,2 +174,2 @@ admin: - cpu: 4 - memory: "512Mi" + cpu: 1 + memory: "8Gi" @@ -172,12 +177,2 @@ admin: - cpu: 4 - memory: "4Gi" - - - # Number of uvicorn workers for running the application - # (2 x $num_cores) + 1 - # https://docs.gunicorn.org/en/stable/design.html#how-many-workers - appNumWorkers: 9 - # Number of reports in /cache-reports/... endpoints - cacheReportsNumResults: 1000 - # Log level - logLevel: "DEBUG" + cpu: 2 + memory: "30Gi" diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index f0533761..5c48b155 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -52,0 +53,5 @@ app: "{{ .Release.Name }}-reverse-proxy" +{{- define "labels.admin" -}} +{{ include "labels" . }} +app: "{{ include "release" . }}-admin" +{{- end -}} + @@ -58 +63 @@ app: "{{ include "release" . }}-api" -{{- define "labels.worker.splits" -}} +{{- define "labels.splits" -}} @@ -63 +68 @@ app: "{{ include "release" . }}-worker-splits" -{{- define "labels.worker.firstRows" -}} +{{- define "labels.firstRows" -}} @@ -68,4 +72,0 @@ app: "{{ include "release" . }}-worker-first-rows" -{{- define "labels.admin" -}} -{{ include "labels" . }} -app: "{{ include "release" . }}-admin" -{{- end -}} @@ -77 +78 @@ The assets base URL -{{- printf "https://%s/assets" .Values.apiDomain }} +{{- printf "https://%s/assets" .Values.hostname }} diff --git a/chart/templates/admin/_container.tpl b/chart/templates/admin/_container.tpl deleted file mode 100644 index aa13526f..00000000 --- a/chart/templates/admin/_container.tpl +++ /dev/null @@ -1,63 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -{{- define "containerAdmin" -}} -- name: "{{ include "name" . 
}}-admin" - env: - - name: APP_HOSTNAME - value: {{ .Values.admin.appHostname | quote }} - - name: APP_NUM_WORKERS - value: {{ .Values.admin.appNumWorkers | quote }} - - name: APP_PORT - value: {{ .Values.admin.appPort | quote }} - - name: ASSETS_DIRECTORY - value: {{ .Values.admin.assetsDirectory | quote }} - - name: CACHE_REPORTS_NUM_RESULTS - value: {{ .Values.admin.cacheReportsNumResults | quote }} - - name: HF_ORGANIZATION - value: {{ .Values.admin.hfOrganization | quote }} - - name: HF_WHOAMI_PATH - value: {{ .Values.admin.hfWhoamiPath | quote }} - - name: LOG_LEVEL - value: {{ .Values.admin.logLevel | quote }} - - name: MAX_AGE_SHORT_SECONDS - value: {{ .Values.admin.maxAgeShortSeconds | quote }} - - name: MONGO_CACHE_DATABASE - value: {{ .Values.mongodb.cacheDatabase | quote }} - - name: MONGO_QUEUE_DATABASE - value: {{ .Values.mongodb.queueDatabase | quote }} - - name: MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: PROMETHEUS_MULTIPROC_DIR - value: {{ .Values.admin.prometheusMultiprocDirectory | quote }} - image: {{ .Values.dockerImage.admin }} - imagePullPolicy: IfNotPresent - volumeMounts: - - mountPath: {{ .Values.admin.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . }}" - readOnly: false - securityContext: - allowPrivilegeEscalation: false - readinessProbe: - tcpSocket: - port: {{ .Values.admin.readinessPort }} - livenessProbe: - tcpSocket: - port: {{ .Values.admin.readinessPort }} - ports: - - containerPort: {{ .Values.admin.appPort }} - name: http - protocol: TCP - resources: - {{ toYaml .Values.admin.resources | nindent 4 }} -{{- end -}} diff --git a/chart/templates/api/_container.tpl b/chart/templates/api/_container.tpl deleted file mode 100644 index b4a88f4f..00000000 --- a/chart/templates/api/_container.tpl +++ /dev/null @@ -1,69 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -{{- define "containerApi" -}} -- name: "{{ include "name" . 
}}-api" - env: - - name: APP_HOSTNAME - value: {{ .Values.api.appHostname | quote }} - - name: APP_NUM_WORKERS - value: {{ .Values.api.appNumWorkers | quote }} - - name: APP_PORT - value: {{ .Values.api.appPort | quote }} - - name: ASSETS_DIRECTORY - value: {{ .Values.api.assetsDirectory | quote }} - - name: HF_ENDPOINT - value: {{ .Values.hfEndpoint | quote }} - - name: HF_TOKEN - # see https://kubernetes.io/docs/concepts/configuration/secret/#creating-a-secret - # and https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN - optional: false - - name: LOG_LEVEL - value: {{ .Values.api.logLevel | quote }} - - name: MAX_AGE_LONG_SECONDS - value: {{ .Values.api.maxAgeLongSeconds | quote }} - - name: MAX_AGE_SHORT_SECONDS - value: {{ .Values.api.maxAgeShortSeconds | quote }} - - name: MONGO_CACHE_DATABASE - value: {{ .Values.mongodb.cacheDatabase | quote }} - - name: MONGO_QUEUE_DATABASE - value: {{ .Values.mongodb.queueDatabase | quote }} - - name: MONGO_URL - {{- if .Values.mongodb.enabled }} - value: mongodb://{{.Release.Name}}-mongodb - {{- else }} - valueFrom: - secretKeyRef: - name: {{ .Values.secrets.mongoUrl | quote }} - key: MONGO_URL - optional: false - {{- end }} - - name: PROMETHEUS_MULTIPROC_DIR - value: {{ .Values.api.prometheusMultiprocDirectory | quote }} - image: {{ .Values.dockerImage.api }} - imagePullPolicy: IfNotPresent - volumeMounts: - - mountPath: {{ .Values.api.assetsDirectory | quote }} - mountPropagation: None - name: nfs - subPath: "{{ include "assets.subpath" . }}" - readOnly: true - securityContext: - allowPrivilegeEscalation: false - readinessProbe: - tcpSocket: - port: {{ .Values.api.readinessPort }} - livenessProbe: - tcpSocket: - port: {{ .Values.api.readinessPort }} - ports: - - containerPort: {{ .Values.api.appPort }} - name: http - protocol: TCP - resources: - {{ toYaml .Values.api.resources | nindent 4 }} -{{- end -}} diff --git a/chart/templates/ingress.yaml b/chart/templates/ingress.yaml index 6fc6e777..7f30b505 100644 --- a/chart/templates/ingress.yaml +++ b/chart/templates/ingress.yaml @@ -13 +13 @@ spec: - - host: {{ .Values.apiDomain }} + - host: {{ .Values.hostname }} diff --git a/chart/templates/reverse-proxy/_container.tpl b/chart/templates/reverse-proxy/_container.tpl index 7b957507..24a372c0 100644 --- a/chart/templates/reverse-proxy/_container.tpl +++ b/chart/templates/reverse-proxy/_container.tpl @@ -5 +5 @@ -- name: "{{ include "name" . }}-reverse-proxy" +- name: "{{ include "name" . }}-reverse-proxy" @@ -10 +10 @@ - value: {{ .Values.reverseProxy.assetsDirectory | quote }} + value: {{ .Values.cache.assetsDirectory | quote }} @@ -32 +32 @@ - - mountPath: {{ .Values.reverseProxy.assetsDirectory | quote }} + - mountPath: {{ .Values.cache.assetsDirectory | quote }} diff --git a/chart/templates/services/admin/_container.tpl b/chart/templates/services/admin/_container.tpl new file mode 100644 index 00000000..90e73fe2 --- /dev/null +++ b/chart/templates/services/admin/_container.tpl @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerAdmin" -}} +- name: "{{ include "name" . 
}}-admin" + image: {{ .Values.dockerImage.services.admin }} + imagePullPolicy: IfNotPresent + env: + - name: CACHE_ASSETS_DIRECTORY + value: {{ .Values.cache.assetsDirectory | quote }} + - name: CACHE_MONGO_DATABASE + value: {{ .Values.cache.mongoDatabase | quote }} + - name: CACHE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL + optional: false + {{- end }} + - name: QUEUE_MONGO_DATABASE + value: {{ .Values.queue.mongoDatabase | quote }} + - name: QUEUE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL + optional: false + {{- end }} + - name: COMMON_ASSETS_BASE_URL + value: "{{ include "assets.baseUrl" . }}" + - name: COMMON_HF_ENDPOINT + value: {{ .Values.common.hfEndpoint | quote }} + - name: COMMON_HF_TOKEN + value: {{ .Values.secrets.hfToken | quote }} + - name: COMMON_LOG_LEVEL + value: {{ .Values.common.logLevel | quote }} + - name: ADMIN_HF_ORGANIZATION + value: {{ .Values.admin.hfOrganization | quote }} + - name: ADMIN_CACHE_REPORTS_NUM_RESULTS + value: {{ .Values.admin.cacheReportsNumResults | quote }} + - name: ADMIN_HF_WHOAMI_PATH + value: {{ .Values.admin.hfWhoamiPath | quote }} + - name: ADMIN_MAX_AGE + value: {{ .Values.admin.maxAge | quote }} + - name: ADMIN_PROMETHEUS_MULTIPROC_DIR + value: {{ .Values.admin.prometheusMultiprocDirectory | quote }} + - name: ADMIN_UVICORN_HOSTNAME + value: {{ .Values.admin.uvicornHostname | quote }} + - name: ADMIN_UVICORN_NUM_WORKERS + value: {{ .Values.admin.uvicornNumWorkers | quote }} + - name: ADMIN_UVICORN_PORT + value: {{ .Values.admin.uvicornPort | quote }} + volumeMounts: + - mountPath: {{ .Values.cache.assetsDirectory | quote }} + mountPropagation: None + name: nfs + subPath: "{{ include "assets.subpath" . 
}}" + readOnly: false + securityContext: + allowPrivilegeEscalation: false + readinessProbe: + tcpSocket: + port: {{ .Values.admin.readinessPort }} + livenessProbe: + tcpSocket: + port: {{ .Values.admin.readinessPort }} + ports: + - containerPort: {{ .Values.admin.uvicornPort }} + name: http + protocol: TCP + resources: + {{ toYaml .Values.admin.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/admin/deployment.yaml b/chart/templates/services/admin/deployment.yaml similarity index 100% rename from chart/templates/admin/deployment.yaml rename to chart/templates/services/admin/deployment.yaml diff --git a/chart/templates/admin/service.yaml b/chart/templates/services/admin/service.yaml similarity index 91% rename from chart/templates/admin/service.yaml rename to chart/templates/services/admin/service.yaml index 857e6d5b..57679886 100644 --- a/chart/templates/admin/service.yaml +++ b/chart/templates/services/admin/service.yaml @@ -18 +18 @@ spec: - targetPort: {{ .Values.admin.appPort }} + targetPort: {{ .Values.admin.uvicornPort }} diff --git a/chart/templates/admin/servicemonitor.yaml b/chart/templates/services/admin/servicemonitor.yaml similarity index 100% rename from chart/templates/admin/servicemonitor.yaml rename to chart/templates/services/admin/servicemonitor.yaml diff --git a/chart/templates/services/api/_container.tpl b/chart/templates/services/api/_container.tpl new file mode 100644 index 00000000..cd59e76a --- /dev/null +++ b/chart/templates/services/api/_container.tpl @@ -0,0 +1,77 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +{{- define "containerApi" -}} +- name: "{{ include "name" . }}-api" + image: {{ .Values.dockerImage.services.api }} + imagePullPolicy: IfNotPresent + env: + - name: CACHE_ASSETS_DIRECTORY + value: {{ .Values.cache.assetsDirectory | quote }} + - name: CACHE_MONGO_DATABASE + value: {{ .Values.cache.mongoDatabase | quote }} + - name: CACHE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL + optional: false + {{- end }} + - name: QUEUE_MONGO_DATABASE + value: {{ .Values.queue.mongoDatabase | quote }} + - name: QUEUE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL + optional: false + {{- end }} + - name: COMMON_ASSETS_BASE_URL + value: "{{ include "assets.baseUrl" . }}" + - name: COMMON_HF_ENDPOINT + value: {{ .Values.common.hfEndpoint | quote }} + - name: COMMON_HF_TOKEN + value: {{ .Values.secrets.hfToken | quote }} + - name: COMMON_LOG_LEVEL + value: {{ .Values.common.logLevel | quote }} + - name: API_HF_AUTH_PATH + value: {{ .Values.api.hfAuthPath | quote }} + - name: API_MAX_AGE_LONG + value: {{ .Values.api.maxAgeLong | quote }} + - name: API_MAX_AGE_SHORT + value: {{ .Values.api.maxAgeShort | quote }} + - name: API_PROMETHEUS_MULTIPROC_DIR + value: {{ .Values.api.prometheusMultiprocDirectory | quote }} + - name: API_UVICORN_HOSTNAME + value: {{ .Values.api.uvicornHostname | quote }} + - name: API_UVICORN_NUM_WORKERS + value: {{ .Values.api.uvicornNumWorkers | quote }} + - name: API_UVICORN_PORT + value: {{ .Values.api.uvicornPort | quote }} + volumeMounts: + - mountPath: {{ .Values.cache.assetsDirectory | quote }} + mountPropagation: None + name: nfs + subPath: "{{ include "assets.subpath" . 
}}" + readOnly: true + securityContext: + allowPrivilegeEscalation: false + readinessProbe: + tcpSocket: + port: {{ .Values.api.readinessPort }} + livenessProbe: + tcpSocket: + port: {{ .Values.api.readinessPort }} + ports: + - containerPort: {{ .Values.api.uvicornPort }} + name: http + protocol: TCP + resources: + {{ toYaml .Values.api.resources | nindent 4 }} +{{- end -}} diff --git a/chart/templates/api/deployment.yaml b/chart/templates/services/api/deployment.yaml similarity index 100% rename from chart/templates/api/deployment.yaml rename to chart/templates/services/api/deployment.yaml diff --git a/chart/templates/api/service.yaml b/chart/templates/services/api/service.yaml similarity index 91% rename from chart/templates/api/service.yaml rename to chart/templates/services/api/service.yaml index 779d2db0..73a6c302 100644 --- a/chart/templates/api/service.yaml +++ b/chart/templates/services/api/service.yaml @@ -18 +18 @@ spec: - targetPort: {{ .Values.api.appPort }} + targetPort: {{ .Values.api.uvicornPort }} diff --git a/chart/templates/api/servicemonitor.yaml b/chart/templates/services/api/servicemonitor.yaml similarity index 100% rename from chart/templates/api/servicemonitor.yaml rename to chart/templates/services/api/servicemonitor.yaml diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index 6eb244e4..6bf3df0b 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -5,0 +6,2 @@ + image: {{ .Values.dockerImage.workers.firstRows }} + imagePullPolicy: IfNotPresent @@ -7,16 +9,8 @@ - - name: ASSETS_BASE_URL - value: "{{ include "assets.baseUrl" . }}" - - name: ASSETS_DIRECTORY - value: {{ .Values.worker.firstRows.assetsDirectory | quote }} - - name: HF_DATASETS_CACHE - value: "{{ .Values.worker.firstRows.cacheDirectory }}/datasets" - - name: HF_ENDPOINT - value: {{ .Values.hfEndpoint | quote }} - # note: HF_MODULES_CACHE is not set to a shared directory - - name: HF_MODULES_CACHE - value: "/tmp/modules-cache" - # the size should remain so small that we don't need to worry about putting it on an external storage - # see https://github.com/huggingface/datasets-server/issues/248 - - name: HF_TOKEN - # see https://kubernetes.io/docs/concepts/configuration/secret/#creating-a-secret - # and https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables + - name: CACHE_ASSETS_DIRECTORY + value: {{ .Values.cache.assetsDirectory | quote }} + - name: CACHE_MONGO_DATABASE + value: {{ .Values.cache.mongoDatabase | quote }} + - name: CACHE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} @@ -25,2 +19,2 @@ - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL @@ -28,17 +22,12 @@ - - name: LOG_LEVEL - value: {{ .Values.worker.firstRows.logLevel | quote }} - - name: MAX_JOBS_PER_DATASET - value: {{ .Values.worker.firstRows.maxJobsPerDataset | quote }} - - name: MAX_LOAD_PCT - value: {{ .Values.worker.firstRows.maxLoadPct | quote }} - - name: MAX_MEMORY_PCT - value: {{ .Values.worker.firstRows.maxMemoryPct | quote }} - - name: MAX_SIZE_FALLBACK - value: {{ .Values.worker.firstRows.maxSizeFallback | quote }} - - name: MIN_CELL_BYTES - value: {{ .Values.worker.firstRows.minCellBytes | quote }} - - name: MONGO_CACHE_DATABASE - value: {{ .Values.mongodb.cacheDatabase | quote }} - - name: MONGO_QUEUE_DATABASE - value: {{ 
.Values.mongodb.queueDatabase | quote }} - - name: MONGO_URL + {{- end }} + - name: QUEUE_MAX_JOBS_PER_DATASET + # value: {{ .Values.queue.maxJobsPerDataset | quote }} + # overridden + value: {{ .Values.firstRows.queue.maxJobsPerDataset | quote }} + - name: QUEUE_MAX_LOAD_PCT + value: {{ .Values.queue.maxLoadPct | quote }} + - name: QUEUE_MAX_MEMORY_PCT + value: {{ .Values.queue.maxMemoryPct | quote }} + - name: QUEUE_MONGO_DATABASE + value: {{ .Values.queue.mongoDatabase | quote }} + - name: QUEUE_MONGO_URL @@ -53,0 +43,16 @@ + - name: QUEUE_WORKER_SLEEP_SECONDS + value: {{ .Values.queue.sleepSeconds | quote }} + - name: COMMON_ASSETS_BASE_URL + value: "{{ include "assets.baseUrl" . }}" + - name: COMMON_HF_ENDPOINT + value: {{ .Values.common.hfEndpoint | quote }} + - name: COMMON_HF_TOKEN + value: {{ .Values.secrets.hfToken | quote }} + - name: COMMON_LOG_LEVEL + value: {{ .Values.common.logLevel | quote }} + - name: HF_DATASETS_CACHE + value: {{ .Values.hfDatasetsCache | quote }} + - name: HF_MODULES_CACHE + value: "/tmp/modules-cache" + # the size should remain so small that we don't need to worry about putting it on an external storage + # see https://github.com/huggingface/datasets-server/issues/248 @@ -55,11 +60,11 @@ - value: {{ .Values.worker.firstRows.numbaCacheDirectory | quote }} - - name: ROWS_MAX_BYTES - value: {{ .Values.worker.firstRows.rowsMaxBytes | quote }} - - name: ROWS_MAX_NUMBER - value: {{ .Values.worker.firstRows.rowsMaxNumber | quote }} - - name: ROWS_MIN_NUMBER - value: {{ .Values.worker.firstRows.rowsMinNumber| quote }} - - name: WORKER_SLEEP_SECONDS - value: {{ .Values.worker.firstRows.workerleepSeconds | quote }} - image: {{ .Values.dockerImage.worker.firstRows }} - imagePullPolicy: IfNotPresent + value: {{ .Values.numbaCacheDirectory | quote }} + - name: FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE + value: {{ .Values.firstRows.fallbackMaxDatasetSize | quote }} + - name: FIRST_ROWS_MAX_BYTES + value: {{ .Values.firstRows.maxBytes | quote }} + - name: FIRST_ROWS_MAX_NUMBER + value: {{ .Values.firstRows.maxNumber | quote }} + - name: FIRST_ROWS_MIN_CELL_BYTES + value: {{ .Values.firstRows.minCellBytes | quote }} + - name: FIRST_ROWS_MIN_NUMBER + value: {{ .Values.firstRows.minNumber| quote }} @@ -67 +72 @@ - - mountPath: {{ .Values.worker.firstRows.assetsDirectory | quote }} + - mountPath: {{ .Values.cache.assetsDirectory | quote }} @@ -72 +77 @@ - - mountPath: {{ .Values.worker.firstRows.cacheDirectory | quote }} + - mountPath: {{ .Values.hfDatasetsCache | quote }} @@ -77 +82 @@ - - mountPath: {{ .Values.worker.firstRows.numbaCacheDirectory | quote }} + - mountPath: {{ .Values.numbaCacheDirectory | quote }} @@ -85 +90 @@ - {{ toYaml .Values.worker.firstRows.resources | nindent 4 }} + {{ toYaml .Values.firstRows.resources | nindent 4 }} diff --git a/chart/templates/worker/first-rows/deployment.yaml b/chart/templates/worker/first-rows/deployment.yaml index 7c5af3a5..49a14bff 100644 --- a/chart/templates/worker/first-rows/deployment.yaml +++ b/chart/templates/worker/first-rows/deployment.yaml @@ -8 +8 @@ metadata: - {{ include "labels.worker.firstRows" . | nindent 4 }} + {{ include "labels.firstRows" . | nindent 4 }} @@ -13 +13 @@ spec: - replicas: {{ .Values.worker.firstRows.replicas }} + replicas: {{ .Values.firstRows.replicas }} @@ -17 +17 @@ spec: - {{ include "labels.worker.firstRows" . | nindent 6 }} + {{ include "labels.firstRows" . | nindent 6 }} @@ -23 +23 @@ spec: - {{ include "labels.worker.firstRows" . | nindent 8 }} + {{ include "labels.firstRows" . 
| nindent 8 }} @@ -32 +32 @@ spec: - {{ toYaml .Values.worker.firstRows.nodeSelector | nindent 8 }} + {{ toYaml .Values.firstRows.nodeSelector | nindent 8 }} @@ -34 +34 @@ spec: - {{ toYaml .Values.worker.firstRows.tolerations | nindent 8 }} + {{ toYaml .Values.firstRows.tolerations | nindent 8 }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index 8a30a6ad..7e3d5187 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -5,0 +6,2 @@ + image: {{ .Values.dockerImage.workers.splits }} + imagePullPolicy: IfNotPresent @@ -7,11 +9,8 @@ - - name: HF_DATASETS_CACHE - value: "{{ .Values.worker.splits.cacheDirectory }}/datasets" - - name: HF_ENDPOINT - value: {{ .Values.hfEndpoint | quote }} - - name: HF_MODULES_CACHE - value: "/tmp/modules-cache" - # the size should remain so small that we don't need to worry about putting it on an external storage - # see https://github.com/huggingface/datasets-server/issues/248 - - name: HF_TOKEN - # see https://kubernetes.io/docs/concepts/configuration/secret/#creating-a-secret - # and https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables + - name: CACHE_ASSETS_DIRECTORY + value: {{ .Values.cache.assetsDirectory | quote }} + - name: CACHE_MONGO_DATABASE + value: {{ .Values.cache.mongoDatabase | quote }} + - name: CACHE_MONGO_URL + {{- if .Values.mongodb.enabled }} + value: mongodb://{{.Release.Name}}-mongodb + {{- else }} @@ -20,2 +19,2 @@ - name: {{ .Values.secrets.hfToken | quote }} - key: HF_TOKEN + name: {{ .Values.secrets.mongoUrl | quote }} + key: MONGO_URL @@ -23,13 +22,12 @@ - - name: LOG_LEVEL - value: {{ .Values.worker.splits.logLevel | quote }} - - name: MAX_JOBS_PER_DATASET - value: {{ .Values.worker.splits.maxJobsPerDataset | quote }} - - name: MAX_LOAD_PCT - value: {{ .Values.worker.splits.maxLoadPct | quote }} - - name: MAX_MEMORY_PCT - value: {{ .Values.worker.splits.maxMemoryPct | quote }} - - name: MONGO_CACHE_DATABASE - value: {{ .Values.mongodb.cacheDatabase | quote }} - - name: MONGO_QUEUE_DATABASE - value: {{ .Values.mongodb.queueDatabase | quote }} - - name: MONGO_URL + {{- end }} + - name: QUEUE_MAX_JOBS_PER_DATASET + # value: {{ .Values.queue.maxJobsPerDataset | quote }} + # overridden + value: {{ .Values.splits.queue.maxJobsPerDataset | quote }} + - name: QUEUE_MAX_LOAD_PCT + value: {{ .Values.queue.maxLoadPct | quote }} + - name: QUEUE_MAX_MEMORY_PCT + value: {{ .Values.queue.maxMemoryPct | quote }} + - name: QUEUE_MONGO_DATABASE + value: {{ .Values.queue.mongoDatabase | quote }} + - name: QUEUE_MONGO_URL @@ -44,0 +43,16 @@ + - name: QUEUE_WORKER_SLEEP_SECONDS + value: {{ .Values.queue.sleepSeconds | quote }} + - name: COMMON_ASSETS_BASE_URL + value: "{{ include "assets.baseUrl" . 
}}" + - name: COMMON_HF_ENDPOINT + value: {{ .Values.common.hfEndpoint | quote }} + - name: COMMON_HF_TOKEN + value: {{ .Values.secrets.hfToken | quote }} + - name: COMMON_LOG_LEVEL + value: {{ .Values.common.logLevel | quote }} + - name: HF_DATASETS_CACHE + value: {{ .Values.hfDatasetsCache | quote }} + - name: HF_MODULES_CACHE + value: "/tmp/modules-cache" + # the size should remain so small that we don't need to worry about putting it on an external storage + # see https://github.com/huggingface/datasets-server/issues/248 @@ -46,5 +60 @@ - value: {{ .Values.worker.splits.numbaCacheDirectory | quote }} - - name: WORKER_SLEEP_SECONDS - value: {{ .Values.worker.splits.workerleepSeconds | quote }} - image: {{ .Values.dockerImage.worker.splits }} - imagePullPolicy: IfNotPresent + value: {{ .Values.numbaCacheDirectory | quote }} @@ -52 +62 @@ - - mountPath: {{ .Values.worker.splits.cacheDirectory | quote }} + - mountPath: {{ .Values.hfDatasetsCache | quote }} @@ -57 +67 @@ - - mountPath: {{ .Values.worker.splits.numbaCacheDirectory | quote }} + - mountPath: {{ .Values.numbaCacheDirectory | quote }} @@ -65 +75 @@ - {{ toYaml .Values.worker.splits.resources | nindent 4 }} + {{ toYaml .Values.splits.resources | nindent 4 }} diff --git a/chart/templates/worker/splits/deployment.yaml b/chart/templates/worker/splits/deployment.yaml index f4ab5c0b..1fd69c87 100644 --- a/chart/templates/worker/splits/deployment.yaml +++ b/chart/templates/worker/splits/deployment.yaml @@ -8 +8 @@ metadata: - {{ include "labels.worker.splits" . | nindent 4 }} + {{ include "labels.splits" . | nindent 4 }} @@ -13 +13 @@ spec: - replicas: {{ .Values.worker.splits.replicas }} + replicas: {{ .Values.splits.replicas }} @@ -17 +17 @@ spec: - {{ include "labels.worker.splits" . | nindent 6 }} + {{ include "labels.splits" . | nindent 6 }} @@ -23 +23 @@ spec: - {{ include "labels.worker.splits" . | nindent 8 }} + {{ include "labels.splits" . 
| nindent 8 }} @@ -31 +31 @@ spec: - {{ toYaml .Values.worker.splits.nodeSelector | nindent 8 }} + {{ toYaml .Values.splits.nodeSelector | nindent 8 }} @@ -33 +33 @@ spec: - {{ toYaml .Values.worker.splits.tolerations | nindent 8 }} + {{ toYaml .Values.splits.tolerations | nindent 8 }} diff --git a/chart/values.yaml b/chart/values.yaml index f518bd2b..a182b3e7 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -0,0 +1,17 @@ +# --- common parameters --- + +hostname: "datasets-server.huggingface.co" + +secrets: + hfToken: datasets-server-hf-token + mongoUrl: false + +uid: 1000 +gid: 3000 + +storage: + nfs: {} + +monitoring: + enabled: false + @@ -8,4 +24,0 @@ mongodb: - # Name of the mongo db database used to cache the datasets - cacheDatabase: "datasets_server_cache" - # Name of the mongo db database used to store the jobs queue - queueDatabase: "datasets_server_queue" @@ -15,2 +27,0 @@ dockerImage: - admin: "" - api: "" @@ -18 +29,4 @@ dockerImage: - worker: + services: + admin: "" + api: "" + workers: @@ -20 +34 @@ dockerImage: - first-rows: "" + firstRows: "" @@ -22,2 +36,25 @@ dockerImage: -storage: - nfs: {} +cache: + # Directory on the shared storage (audio files and images) + assetsDirectory: "/assets" + # Name of the mongo db database used to cache the API responses + mongoDatabase: "datasets_server_cache" + +queue: + # Maximum number of jobs running at the same time for the same dataset + maxJobsPerDataset: 1 + # Max CPU load (%) - if reached, sleeps until it comes back under the limit + maxLoadPct: 0 + # Max memory (RAM + SWAP) (%) - if reached, sleeps until it comes back under the limit + maxMemoryPct: 0 + # Name of the mongo db database used to store the jobs queue + mongoDatabase: "datasets_server_queue" + # Number of seconds a worker will sleep before trying to process a new job + sleepSeconds: 15 + +common: + # base URL for the assets files. 
It should be set accordingly to the datasets-server domain, eg https://datasets-server.huggingface.co/assets + # assetsBaseUrl: "not used for now" + # URL of the HuggingFace Hub + hfEndpoint: "https://huggingface.co" + # Log level + logLevel: "INFO" @@ -25,3 +62,6 @@ storage: -secrets: - hfToken: datasets-server-hf-token - mongoUrl: false +# Directory where the `datasets` library will store the cached datasets data +hfDatasetsCache: "/hf-datasets-cache" +# Directory where the `datasets` library will store the cached datasets scripts +#hfModulesCache: "not used" +# Directory where the `numba` decorators (used by `librosa`) can write cache +numbaCacheDirectory: "/numba-cache" @@ -29,2 +69 @@ secrets: -monitoring: - enabled: false +# --- reverse proxy --- @@ -32,4 +71,6 @@ monitoring: -# adminDomain: "admin-datasets-server-dev.us.dev.moon.huggingface.tech" -# apiDomain: "datasets-server-dev.us.dev.moon.huggingface.tech" -uid: 1000 -gid: 3000 +reverseProxy: + host: localhost + port: 80 + nginxTemplateFile: "nginx-templates/default.conf.template" + openapiFile: "static-files/openapi.json" + error404File: "nginx-templates/404.html" @@ -37 +78,12 @@ gid: 3000 -hfEndpoint: "https://huggingface.co" + nodeSelector: {} + readinessPort: 80 + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 + service: + type: NodePort + annotations: {} + tolerations: [] @@ -47,2 +99 @@ ingress: -reverseProxy: - replicas: 1 +# --- services --- @@ -50,3 +101,18 @@ reverseProxy: - service: - type: NodePort - annotations: {} +admin: + # HF organization that is allowed to request the report + hfOrganization: "huggingface" + # Number of reports in /cache-reports/... endpoints + cacheReportsNumResults: 100 + # The path of the whoami service on the hub. + hfWhoamiPath: "/api/whoami-v2" + # Number of seconds to set in the `max-age` header on technical endpoints + maxAge: "10" + # Directory where the uvicorn workers share their prometheus metrics + # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn + prometheusMultiprocDirectory: "/tmp" + # hostname - it must not be set to localhost to work in Kube! + uvicornHostname: "0.0.0.0" + # Number of uvicorn workers for running the application + uvicornNumWorkers: "1" + # Application endpoint port + uvicornPort: 80 @@ -53,0 +120,3 @@ reverseProxy: + nodeSelector: {} + readinessPort: 80 + replicas: 1 @@ -59 +128,3 @@ reverseProxy: - nodeSelector: {} + service: + type: NodePort + annotations: {} @@ -62,9 +132,0 @@ reverseProxy: - # Directory of assets (audio files and images that will be served for the web) - assetsDirectory: "/assets" - readinessPort: 80 - host: localhost - nginxTemplateFile: "nginx-templates/default.conf.template" - openapiFile: "static-files/openapi.json" - error404File: "nginx-templates/404.html" - port: 80 - @@ -72 +134,16 @@ api: - replicas: 1 + # the path of the external authentication service on the hub. + # The string must contain `%s` which will be replaced with the dataset name. + hfAuthPath: "/api/datasets/%s/auth-check" + # Number of seconds to set in the `max-age` header on data endpoints + maxAgeLong: "120" + # Number of seconds to set in the `max-age` header on technical endpoints + maxAgeShort: "10" + # Directory where the uvicorn workers will write the prometheus metrics + # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn + prometheusMultiprocDirectory: "/tmp" + # Hostname - it must not be set to localhost to work in Kube! 
+ uvicornHostname: "0.0.0.0" + # Number of uvicorn workers for running the application + uvicornNumWorkers: "1" + # Application endpoint port + uvicornPort: 80 @@ -73,0 +151,8 @@ api: + nodeSelector: {} + readinessPort: 80 + replicas: 1 + resources: + requests: + cpu: 1 + limits: + cpu: 1 @@ -76,0 +162 @@ api: + tolerations: [] @@ -77,0 +164,10 @@ api: +# --- workers --- + +splits: + # override the common queue parameters + queue: + # Maximum number of jobs running at the same time for the same dataset + maxJobsPerDataset: 1 + + nodeSelector: {} + replicas: 1 @@ -83 +178,0 @@ api: - nodeSelector: {} @@ -86,35 +181,13 @@ api: - # Directory of assets (audio files and images that will be served for the web) - assetsDirectory: "/assets" - readinessPort: 80 - # Application hostname - it must not be set to localhost to work in Kube! - appHostname: "0.0.0.0" - # Number of uvicorn workers for running the application - appNumWorkers: "1" - # Application endpoint port - appPort: 80 - # Log level - logLevel: "INFO" - # Number of seconds to set in the `max-age` header on data endpoints - maxAgeLongSeconds: "120" - # Number of seconds to set in the `max-age` header on technical endpoints - maxAgeShortSeconds: "10" - # Directory where the uvicorn workers will write the prometheus metrics - # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn - prometheusMultiprocDirectory: "/tmp" - -worker: - splits: - replicas: 1 - - resources: - requests: - cpu: 1 - limits: - cpu: 1 - nodeSelector: {} - tolerations: [] - - # Directory of the "datasets" library cache (for the datasets, not the modules) - cacheDirectory: "/cache" - # Log level - logLevel: "INFO" +firstRows: + # Max size (in bytes) of the dataset to fallback in normal mode if streaming fails + fallbackMaxDatasetSize: "100_000_000" + # Max size of the /first-rows endpoint response in bytes + maxBytes: "1_000_000" + # Max number of rows in the /first-rows endpoint response + maxNumber: 100 + # Min size of a cell in the /first-rows endpoint response in bytes + minCellBytes: 100 + # Min number of rows in the /first-rows endpoint response + minNumber: 10 + # override the common queue parameters + queue: @@ -123,48 +195,0 @@ worker: - # Max CPU load (%) - if reached, sleeps until it comes back under the limit - maxLoadPct: 0 - # Max memory (RAM + SWAP) (%) - if reached, sleeps until it comes back under the limit - maxMemoryPct: 0 - # Directory of the "numba" library cache - numbaCacheDirectory: "/numba-cache" - # Number of seconds a worker will sleep before trying to process a new job - workerSleepSeconds: 15 - - firstRows: - replicas: 1 - - resources: - requests: - cpu: 1 - limits: - cpu: 1 - nodeSelector: {} - tolerations: [] - - # Directory of assets (audio files and images that will be served for the web) - assetsDirectory: "/assets" - # Directory of the "datasets" library cache (for the datasets, not the modules) - cacheDirectory: "/cache" - # User Access Token (see https://huggingface.co/settings/token, only the `read` role is required) - hfToken: "" - # Log level - logLevel: "INFO" - # Maximum number of jobs running at the same time for the same dataset - maxJobsPerDataset: 1 - # Max CPU load (%) - if reached, sleeps until it comes back under the limit - maxLoadPct: 0 - # Max memory (RAM + SWAP) (%) - if reached, sleeps until it comes back under the limit - maxMemoryPct: 0 - # Max size (in bytes) of the dataset to fallback in normal mode if streaming fails - maxSizeFallback: "100_000_000" - # Min size of a cell in the 
/first-rows endpoint response in bytes - minCellBytes: 100 - # Directory of the "numba" library cache - numbaCacheDirectory: "/numba-cache" - # Max size of the /first-rows endpoint response in bytes - rowMaxBytes: "1_000_000" - # Max number of rows in the /first-rows endpoint response - rowsMaxNumber: 100 - # Min number of rows in the /first-rows endpoint response - rowsMinNumber: 10 - # Number of seconds a worker will sleep before trying to process a new job - workerSleepSeconds: 15 @@ -172 +197 @@ worker: -admin: + nodeSelector: {} @@ -174,5 +198,0 @@ admin: - - service: - type: NodePort - annotations: {} - @@ -184 +203,0 @@ admin: - nodeSelector: {} @@ -186,24 +204,0 @@ admin: - - # Application hostname - it must not be set to localhost to work in Kube! - appHostname: "0.0.0.0" - # Number of uvicorn workers for running the application - appNumWorkers: "1" - # Application endpoint port - appPort: 80 - # Directory of assets (audio files and images that will be served for the web) - assetsDirectory: "/assets" - # Number of reports in /cache-reports/... endpoints - cacheReportsNumResults: 100 - # HF organization - hfOrganization: "huggingface" - # External authentication path. - hfWhoamiPath: "/api/whoami-v2" - # Log level - logLevel: "INFO" - # Number of seconds to set in the `max-age` header on technical endpoints - maxAgeShortSeconds: "10" - # Directory where the uvicorn workers share their prometheus metrics - # see https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn - prometheusMultiprocDirectory: "/tmp" - - readinessPort: 80 diff --git a/docs/source/server.mdx b/docs/source/server.mdx index 8ed2262d..6421a173 100644 --- a/docs/source/server.mdx +++ b/docs/source/server.mdx @@ -27 +27 @@ Workers are responsible for executing the jobs in the queue. They complete the a -Take a look at the [worker configuration](https://github.com/huggingface/datasets-server/tree/main/services/worker#configuration) for a complete list of the environment variables if you're interested in learning more. +Take a look at the [workers configuration](https://github.com/huggingface/datasets-server/tree/main/workers/first_rows#configuration) for a complete list of the environment variables if you're interested in learning more. 
diff --git a/e2e/Makefile b/e2e/Makefile index 62320d57..ab5c215e 100644 --- a/e2e/Makefile +++ b/e2e/Makefile @@ -2,2 +2 @@ -export PORT_ADMIN := 9081 -export PORT_API := 9080 +export COMPOSE_PROJECT_NAME := e2e @@ -5,5 +4,8 @@ export PORT_REVERSE_PROXY := 9000 -export TEST_MONGO_PORT := 27050 -export TEST_ROWS_MAX_NUMBER := 4 -export TEST_COMPOSE_PROJECT_NAME := e2e -export TEST_HF_ENDPOINT := https://hub-ci.huggingface.co -export TEST_HF_TOKEN := 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 +export MONGO_PORT := 27050 +export QUEUE_SLEEP_TIME := 1 +export COMMON_HF_ENDPOINT := https://hub-ci.huggingface.co +export COMMON_HF_TOKEN := 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 +export ADMIN_UVICORN_PORT := 9081 +export API_UVICORN_PORT := 9080 +export FIRST_ROWS_MAX_NUMBER := 4 + @@ -11 +13 @@ export TEST_HF_TOKEN := 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 -TEST_DOCKER_COMPOSE := ../tools/docker-compose-datasets-server-from-remote-images.yml +DOCKER_COMPOSE := ../tools/docker-compose-datasets-server-from-remote-images.yml diff --git a/e2e/tests/utils.py b/e2e/tests/utils.py index 4329f372..c328ea12 100644 --- a/e2e/tests/utils.py +++ b/e2e/tests/utils.py @@ -14 +13,0 @@ PORT_REVERSE_PROXY = os.environ.get("PORT_REVERSE_PROXY", "8000") -ROWS_MAX_NUMBER = int(os.environ.get("ROWS_MAX_NUMBER", 100)) diff --git a/libs/libcache/Makefile b/libs/libcache/Makefile index 06804f6e..bc619378 100644 --- a/libs/libcache/Makefile +++ b/libs/libcache/Makefile @@ -2,3 +2,3 @@ -export TEST_MONGO_PORT := 27020 -export TEST_MONGO_CACHE_DATABASE := datasets_server_cache_test -export TEST_COMPOSE_PROJECT_NAME := libcache +export MONGO_PORT := 27020 +export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} +export COMPOSE_PROJECT_NAME := libcache @@ -6 +6 @@ export TEST_COMPOSE_PROJECT_NAME := libcache -TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml +DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml diff --git a/libs/libcache/README.md b/libs/libcache/README.md index e0e19f19..711d278d 100644 --- a/libs/libcache/README.md +++ b/libs/libcache/README.md @@ -3,0 +4,8 @@ A Python library to manage the storage of precomputed API responses in a mongo d + +## Configuration + +Set environment variables to configure the following aspects: + +- `CACHE_ASSETS_DIRECTORY`: directory where the asset files are stored. Defaults to empty, in which case the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. +- `CACHE_MONGO_DATABASE`: the name of the database used for storing the cache. Defaults to `"datasets_server_cache"`. +- `CACHE_MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. 
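The `CACHE_`-prefixed variables documented above are read through the new environs-based `CacheConfig` class added later in this diff. A minimal usage sketch (not part of the commit; the environment values are illustrative):

```python
# Minimal sketch of reading the CACHE_* variables via the new CacheConfig.
# The environment values set below are hypothetical, for illustration only.
import os

from libcache.config import CacheConfig  # added in this diff

os.environ["CACHE_MONGO_DATABASE"] = "datasets_server_cache_test"
os.environ["CACHE_MONGO_URL"] = "mongodb://localhost:27020"

config = CacheConfig()  # reads the CACHE_-prefixed environment variables
print(config.mongo_database)    # -> datasets_server_cache_test
print(config.mongo_url)         # -> mongodb://localhost:27020
print(config.assets_directory)  # -> None: assets fall back to the OS default cache directory
```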
diff --git a/libs/libcache/dist/libcache-0.3.0-py3-none-any.whl b/libs/libcache/dist/libcache-0.3.0-py3-none-any.whl new file mode 100644 index 00000000..a89d3efa Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.0-py3-none-any.whl differ diff --git a/libs/libcache/dist/libcache-0.3.0.tar.gz b/libs/libcache/dist/libcache-0.3.0.tar.gz new file mode 100644 index 00000000..bb2264fd Binary files /dev/null and b/libs/libcache/dist/libcache-0.3.0.tar.gz differ diff --git a/libs/libcache/poetry.lock b/libs/libcache/poetry.lock index 2271de73..c377113a 100644 --- a/libs/libcache/poetry.lock +++ b/libs/libcache/poetry.lock @@ -148,0 +149,18 @@ conda = ["pyyaml"] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + @@ -213,0 +232,17 @@ plugins = ["setuptools"] +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + @@ -269 +304 @@ description = "Core utilities for Python packages" -category = "dev" +category = "main" @@ -376 +411 @@ description = "pyparsing module - Classes and methods to define and execute pars -category = "dev" +category = "main" @@ -419,0 +455,11 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + @@ -563 +609 @@ python-versions = "3.9.6" -content-hash = "9d23548c6080d98161b06542a06f9c62c3b87b36537db14e32b6988c58639652" +content-hash = "56acd504dbc619f334ffaff80146c1e7564234fc8d903a6e2c93f2a0ec3db22a" @@ -670,0 +717 @@ dparse = [] +environs = [] @@ -694,0 +742 @@ isort = [ +marshmallow = [] @@ -891,0 +940 @@ pytest-cov = [ +python-dotenv = [] diff --git a/libs/libcache/pyproject.toml b/libs/libcache/pyproject.toml index 65cbe938..1b36b3b2 100644 --- a/libs/libcache/pyproject.toml +++ b/libs/libcache/pyproject.toml @@ -5 +5 @@ name = "libcache" -version = "0.2.2" +version = "0.3.0" @@ -9,0 +10 @@ appdirs = "^1.4.4" +environs = "^9.5.0" diff --git a/libs/libcache/src/libcache/config.py b/libs/libcache/src/libcache/config.py new file mode 100644 index 00000000..4163e98c --- /dev/null +++ 
b/libs/libcache/src/libcache/config.py @@ -0,0 +1,19 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from typing import Optional + +from environs import Env + + +class CacheConfig: + assets_directory: Optional[str] + mongo_database: str + mongo_url: str + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("CACHE_"): + self.assets_directory = env.str(name="ASSETS_DIRECTORY", default=None) + self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_cache") + self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") diff --git a/libs/libcache/tests/_utils.py b/libs/libcache/tests/_utils.py deleted file mode 100644 index 1dafee6d..00000000 --- a/libs/libcache/tests/_utils.py +++ /dev/null @@ -1,18 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import os - -DEFAULT_MONGO_CACHE_DATABASE: str = "datasets_server_cache_test" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27017" - - -def get_str_value(d: os._Environ[str], key: str, default: str) -> str: - if key not in d: - return default - value = str(d.get(key)).strip() - return value or default - - -MONGO_CACHE_DATABASE = get_str_value(d=os.environ, key="MONGO_CACHE_DATABASE", default=DEFAULT_MONGO_CACHE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) diff --git a/libs/libcache/tests/conftest.py b/libs/libcache/tests/conftest.py new file mode 100644 index 00000000..e6b69f1f --- /dev/null +++ b/libs/libcache/tests/conftest.py @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from pytest import MonkeyPatch, fixture + +from libcache.config import CacheConfig + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + yield monkeypatch_session + monkeypatch_session.undo() + + +@fixture(scope="session") +def cache_config(monkeypatch_session: MonkeyPatch) -> CacheConfig: + cache_config = CacheConfig() + if "test" not in cache_config.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return cache_config diff --git a/libs/libcache/tests/test_simple_cache.py b/libs/libcache/tests/test_simple_cache.py index 939c9499..661da294 100644 --- a/libs/libcache/tests/test_simple_cache.py +++ b/libs/libcache/tests/test_simple_cache.py @@ -9,0 +10 @@ from pymongo.errors import DocumentTooLarge +from libcache.config import CacheConfig @@ -34,8 +34,0 @@ from libcache.simple_cache import ( -from ._utils import MONGO_CACHE_DATABASE, MONGO_URL - - [email protected](autouse=True, scope="module") -def safe_guard() -> None: - if "test" not in MONGO_CACHE_DATABASE: - raise ValueError("Test must be launched on a test mongo database") - @@ -44,2 +37,2 @@ def safe_guard() -> None: -def client() -> None: - connect_to_cache(database=MONGO_CACHE_DATABASE, host=MONGO_URL) +def client(cache_config: CacheConfig) -> None: + connect_to_cache(database=cache_config.mongo_database, host=cache_config.mongo_url) diff --git a/libs/libutils/.flake8 b/libs/libcommon/.flake8 similarity index 100% rename from libs/libutils/.flake8 rename to libs/libcommon/.flake8 diff --git a/libs/libutils/.python-version b/libs/libcommon/.python-version similarity index 100% rename from libs/libutils/.python-version rename to 
libs/libcommon/.python-version diff --git a/libs/libutils/Makefile b/libs/libcommon/Makefile similarity index 58% rename from libs/libutils/Makefile rename to libs/libcommon/Makefile index 8143734e..3f0a3e34 100644 --- a/libs/libutils/Makefile +++ b/libs/libcommon/Makefile @@ -2 +2 @@ -export TEST_COMPOSE_PROJECT_NAME := libutils +export COMPOSE_PROJECT_NAME := libcommon @@ -4 +4 @@ export TEST_COMPOSE_PROJECT_NAME := libutils -TEST_DOCKER_COMPOSE := ../../tools/docker-compose-empty.yml +DOCKER_COMPOSE := ../../tools/docker-compose-empty.yml diff --git a/libs/libcommon/README.md b/libs/libcommon/README.md new file mode 100644 index 00000000..b75af640 --- /dev/null +++ b/libs/libcommon/README.md @@ -0,0 +1,12 @@ +# libcommon + +A Python library with common code (configuration, utils, logger, exceptions) used by the services and the workers + +## Configuration + +Set environment variables to configure the following aspects: + +- `COMMON_ASSETS_BASE_URL`: base URL for the assets files. It should be set accordingly to the datasets-server domain, eg https://datasets-server.huggingface.co/assets. Defaults to `assets`. +- `COMMON_HF_ENDPOINT`: URL of the HuggingFace Hub. Defaults to `https://huggingface.co`. +- `COMMON_HF_TOKEN`: App Access Token (ask moonlanding administrators to get one, only the `read` role is required), to access the gated datasets. Defaults to empty. +- `COMMON_LOG_LEVEL`: log level, among `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. Defaults to `INFO`. diff --git a/libs/libcommon/dist/libcommon-0.3.0-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.0-py3-none-any.whl new file mode 100644 index 00000000..d379c233 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.0-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.3.0.tar.gz b/libs/libcommon/dist/libcommon-0.3.0.tar.gz new file mode 100644 index 00000000..23972716 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.0.tar.gz differ diff --git a/libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl b/libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl new file mode 100644 index 00000000..22a2e7b7 Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl differ diff --git a/libs/libcommon/dist/libcommon-0.3.1.tar.gz b/libs/libcommon/dist/libcommon-0.3.1.tar.gz new file mode 100644 index 00000000..69940fdf Binary files /dev/null and b/libs/libcommon/dist/libcommon-0.3.1.tar.gz differ diff --git a/libs/libutils/dist/libutils-0.1.0-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.0-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.0-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.0-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.0.tar.gz b/libs/libcommon/dist/libutils-0.1.0.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.0.tar.gz rename to libs/libcommon/dist/libutils-0.1.0.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.1-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.1-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.1-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.1-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.1.tar.gz b/libs/libcommon/dist/libutils-0.1.1.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.1.tar.gz rename to libs/libcommon/dist/libutils-0.1.1.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.10-py3-none-any.whl 
b/libs/libcommon/dist/libutils-0.1.10-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.10-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.10-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.10.tar.gz b/libs/libcommon/dist/libutils-0.1.10.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.10.tar.gz rename to libs/libcommon/dist/libutils-0.1.10.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.11-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.11-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.11-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.11-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.11.tar.gz b/libs/libcommon/dist/libutils-0.1.11.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.11.tar.gz rename to libs/libcommon/dist/libutils-0.1.11.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.2-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.2-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.2-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.2-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.2.tar.gz b/libs/libcommon/dist/libutils-0.1.2.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.2.tar.gz rename to libs/libcommon/dist/libutils-0.1.2.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.3-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.3-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.3-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.3-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.3.tar.gz b/libs/libcommon/dist/libutils-0.1.3.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.3.tar.gz rename to libs/libcommon/dist/libutils-0.1.3.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.4-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.4-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.4-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.4-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.4.tar.gz b/libs/libcommon/dist/libutils-0.1.4.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.4.tar.gz rename to libs/libcommon/dist/libutils-0.1.4.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.5-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.5-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.5-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.5-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.5.tar.gz b/libs/libcommon/dist/libutils-0.1.5.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.5.tar.gz rename to libs/libcommon/dist/libutils-0.1.5.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.6-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.6-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.6-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.6-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.6.tar.gz b/libs/libcommon/dist/libutils-0.1.6.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.6.tar.gz rename to libs/libcommon/dist/libutils-0.1.6.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.7-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.7-py3-none-any.whl similarity index 100% rename from 
libs/libutils/dist/libutils-0.1.7-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.7-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.7.tar.gz b/libs/libcommon/dist/libutils-0.1.7.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.7.tar.gz rename to libs/libcommon/dist/libutils-0.1.7.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.8-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.8-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.8-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.8-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.8.tar.gz b/libs/libcommon/dist/libutils-0.1.8.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.8.tar.gz rename to libs/libcommon/dist/libutils-0.1.8.tar.gz diff --git a/libs/libutils/dist/libutils-0.1.9-py3-none-any.whl b/libs/libcommon/dist/libutils-0.1.9-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.1.9-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.1.9-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.1.9.tar.gz b/libs/libcommon/dist/libutils-0.1.9.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.1.9.tar.gz rename to libs/libcommon/dist/libutils-0.1.9.tar.gz diff --git a/libs/libutils/dist/libutils-0.2.0-py3-none-any.whl b/libs/libcommon/dist/libutils-0.2.0-py3-none-any.whl similarity index 100% rename from libs/libutils/dist/libutils-0.2.0-py3-none-any.whl rename to libs/libcommon/dist/libutils-0.2.0-py3-none-any.whl diff --git a/libs/libutils/dist/libutils-0.2.0.tar.gz b/libs/libcommon/dist/libutils-0.2.0.tar.gz similarity index 100% rename from libs/libutils/dist/libutils-0.2.0.tar.gz rename to libs/libcommon/dist/libutils-0.2.0.tar.gz diff --git a/libs/libutils/poetry.lock b/libs/libcommon/poetry.lock similarity index 96% rename from libs/libutils/poetry.lock rename to libs/libcommon/poetry.lock index b1d5e7b4..80ad9fc4 100644 --- a/libs/libutils/poetry.lock +++ b/libs/libcommon/poetry.lock @@ -1,17 +0,0 @@ -[[package]] -name = "anyio" -version = "3.6.1" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -idna = ">=2.8" -sniffio = ">=1.1" - -[package.extras] -doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16)"] - @@ -145,0 +129,18 @@ conda = ["pyyaml"] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + @@ -185 +186 @@ description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" +category = "dev" @@ -210,0 +212,17 @@ plugins = 
["setuptools"] +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + @@ -255 +273 @@ description = "Core utilities for Python packages" -category = "dev" +category = "main" @@ -341 +359 @@ description = "pyparsing module - Classes and methods to define and execute pars -category = "dev" +category = "main" @@ -384,0 +403,11 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + @@ -461,22 +489,0 @@ python-versions = ">=3.6" -[[package]] -name = "sniffio" -version = "1.2.0" -description = "Sniff out which async library your code is running under" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "starlette" -version = "0.16.0" -description = "The little ASGI library that shines." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -anyio = ">=3.0.0,<4" - -[package.extras] -full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests", "graphene"] - @@ -550 +557 @@ python-versions = "3.9.6" -content-hash = "9a279328f837432c5d45f1f4d3cc2d94db7746a5433495618eea4997093d6e05" +content-hash = "7b89504d8d03a71e3927157f76aeb95ca9ed3ff00bea023000b18ac70f679c13" @@ -553,4 +559,0 @@ content-hash = "9a279328f837432c5d45f1f4d3cc2d94db7746a5433495618eea4997093d6e05 -anyio = [ - {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, - {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, -] @@ -653,0 +657 @@ dparse = [] +environs = [] @@ -677,0 +682 @@ isort = [ +marshmallow = [] @@ -792,0 +798 @@ pytest-cov = [ +python-dotenv = [] @@ -839,8 +844,0 @@ smmap = [ -sniffio = [ - {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, - {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, -] -starlette = [ - {file = "starlette-0.16.0-py3-none-any.whl", hash = "sha256:38eb24bf705a2c317e15868e384c1b8a12ca396e5a3c3a003db7e667c43f939f"}, - {file = "starlette-0.16.0.tar.gz", hash = "sha256:e1904b5d0007aee24bdd3c43994be9b3b729f4f58e740200de1d623f8c3a8870"}, -] diff --git a/libs/libutils/poetry.toml b/libs/libcommon/poetry.toml similarity index 100% rename from libs/libutils/poetry.toml rename to libs/libcommon/poetry.toml diff --git a/libs/libutils/pyproject.toml b/libs/libcommon/pyproject.toml similarity index 80% rename from libs/libutils/pyproject.toml rename to libs/libcommon/pyproject.toml index 6d4ac70b..627c64e7 100644 --- 
a/libs/libutils/pyproject.toml +++ b/libs/libcommon/pyproject.toml @@ -3,3 +3,3 @@ authors = ["Sylvain Lesage <[email protected]>"] -description = "Library for utils" -name = "libutils" -version = "0.2.0" +description = "Library for utils, common to all the services and workers" +name = "libcommon" +version = "0.3.1" @@ -8,0 +9 @@ license = "Apache-2.0" +environs = "^9.5.0" @@ -11 +11,0 @@ python = "3.9.6" -starlette = "^0.16.0" @@ -32 +32 @@ filterwarnings = ["ignore::DeprecationWarning"] -source = ["libutils"] +source = ["libcommon"] diff --git a/libs/libutils/src/libutils/__init__.py b/libs/libcommon/src/libcommon/__init__.py similarity index 100% rename from libs/libutils/src/libutils/__init__.py rename to libs/libcommon/src/libcommon/__init__.py diff --git a/libs/libcommon/src/libcommon/config.py b/libs/libcommon/src/libcommon/config.py new file mode 100644 index 00000000..fb70f744 --- /dev/null +++ b/libs/libcommon/src/libcommon/config.py @@ -0,0 +1,22 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from typing import Optional + +from environs import Env + + +class CommonConfig: + assets_base_url: str + hf_endpoint: str + hf_token: Optional[str] + log_level: int + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("COMMON_"): + self.assets_base_url = env.str(name="ASSETS_BASE_URL", default="assets") + self.hf_endpoint = env.str(name="HF_ENDPOINT", default="https://huggingface.co") + self.log_level = env.log_level(name="LOG_LEVEL", default="INFO") + hf_token = env.str(name="HF_TOKEN", default="") + self.hf_token = None if hf_token == "" else hf_token # nosec diff --git a/libs/libutils/src/libutils/exceptions.py b/libs/libcommon/src/libcommon/exceptions.py similarity index 100% rename from libs/libutils/src/libutils/exceptions.py rename to libs/libcommon/src/libcommon/exceptions.py diff --git a/libs/libutils/src/libutils/logger.py b/libs/libcommon/src/libcommon/logger.py similarity index 84% rename from libs/libutils/src/libutils/logger.py rename to libs/libcommon/src/libcommon/logger.py index 3e406028..2b7ee030 100644 --- a/libs/libutils/src/libutils/logger.py +++ b/libs/libcommon/src/libcommon/logger.py @@ -7 +7 @@ import logging -def init_logger(log_level: str = "INFO", name: str = "datasets_server") -> None: +def init_logger(log_level: int = logging.INFO, name: str = "datasets_server") -> None: diff --git a/libs/libutils/src/libutils/py.typed b/libs/libcommon/src/libcommon/py.typed similarity index 100% rename from libs/libutils/src/libutils/py.typed rename to libs/libcommon/src/libcommon/py.typed diff --git a/libs/libcommon/src/libcommon/utils.py b/libs/libcommon/src/libcommon/utils.py new file mode 100644 index 00000000..2005fcb5 --- /dev/null +++ b/libs/libcommon/src/libcommon/utils.py @@ -0,0 +1,18 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +import base64 +from typing import Any + +import orjson + + +# orjson is used to get rid of errors with datetime (see allenai/c4) +def orjson_default(obj: Any) -> Any: + if isinstance(obj, bytes): + return base64.b64encode(obj).decode("utf-8") + raise TypeError + + +def orjson_dumps(content: Any) -> bytes: + return orjson.dumps(content, option=orjson.OPT_UTC_Z, default=orjson_default) diff --git a/libs/libutils/tests/__init__.py b/libs/libcommon/tests/__init__.py similarity index 100% rename from libs/libutils/tests/__init__.py rename to libs/libcommon/tests/__init__.py diff --git a/libs/libcommon/tests/conftest.py b/libs/libcommon/tests/conftest.py new file mode 100644 index 00000000..404c08b6 --- /dev/null +++ b/libs/libcommon/tests/conftest.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +import pytest + +from libcommon.config import CommonConfig + + [email protected](scope="session") +def common_config(): + return CommonConfig() diff --git a/libs/libcommon/tests/test_config.py b/libs/libcommon/tests/test_config.py new file mode 100644 index 00000000..721ace56 --- /dev/null +++ b/libs/libcommon/tests/test_config.py @@ -0,0 +1,8 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from libcommon.config import CommonConfig + + +def test_common_config(common_config: CommonConfig): + assert common_config.log_level == 20 diff --git a/libs/libqueue/Makefile b/libs/libqueue/Makefile index 4512b343..aa5d9582 100644 --- a/libs/libqueue/Makefile +++ b/libs/libqueue/Makefile @@ -2,3 +2,3 @@ -export TEST_MONGO_PORT := 27021 -export TEST_MONGO_QUEUE_DATABASE := datasets_server_queue_test -export TEST_COMPOSE_PROJECT_NAME := libqueue +export COMPOSE_PROJECT_NAME := libqueue +export MONGO_PORT := 27021 +export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT} @@ -6 +6 @@ export TEST_COMPOSE_PROJECT_NAME := libqueue -TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml +DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml diff --git a/libs/libqueue/README.md b/libs/libqueue/README.md index 3a7deef6..6ef2c40e 100644 --- a/libs/libqueue/README.md +++ b/libs/libqueue/README.md @@ -3,0 +4,11 @@ A Python library to manage the job queues to precompute API responses. The job q + +## Configuration + +Set environment variables to configure the following aspects: + +- `QUEUE_MAX_JOBS_PER_DATASET`: the maximum number of started jobs for the same dataset. Defaults to 1. +- `QUEUE_MAX_LOAD_PCT`: the maximum load of the machine (in percentage: the max between the 1m load and the 5m load divided by the number of cpus \*100) allowed to start a job. Set to 0 to disable the test. Defaults to 70. +- `QUEUE_MAX_MEMORY_PCT`: the maximum memory (RAM + SWAP) usage of the machine (in percentage) allowed to start a job. Set to 0 to disable the test. Defaults to 80. +- `QUEUE_MONGO_DATABASE`: the name of the database used for storing the queue. Defaults to `"datasets_server_queue"`. +- `QUEUE_MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. +- `QUEUE_SLEEP_SECONDS`: duration in seconds of a worker wait loop iteration, before checking if resources are available and processing a job if any is available. Note that the worker does not sleep on the first loop after finishing a job. Defaults to `15`. 
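Likewise, the `QUEUE_`-prefixed variables documented above map onto the new environs-based `QueueConfig` class introduced further down in this diff. A minimal usage sketch (not part of the commit; the environment values are illustrative):

```python
# Minimal sketch of reading the QUEUE_* variables via the new QueueConfig.
# The environment values set below are hypothetical, for illustration only.
import os

from libqueue.config import QueueConfig  # added in this diff

os.environ["QUEUE_MAX_JOBS_PER_DATASET"] = "2"
os.environ["QUEUE_MAX_LOAD_PCT"] = "0"  # 0 disables the CPU load check

config = QueueConfig()  # reads the QUEUE_-prefixed environment variables
print(config.max_jobs_per_dataset)  # -> 2
print(config.max_load_pct)          # -> 0
print(config.sleep_seconds)         # -> 15 (the documented default)
```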
diff --git a/libs/libqueue/dist/libqueue-0.4.0-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.0-py3-none-any.whl new file mode 100644 index 00000000..9e8e9a71 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.0-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.0.tar.gz b/libs/libqueue/dist/libqueue-0.4.0.tar.gz new file mode 100644 index 00000000..d2010c6c Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.0.tar.gz differ diff --git a/libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl b/libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl new file mode 100644 index 00000000..5fda0253 Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl differ diff --git a/libs/libqueue/dist/libqueue-0.4.1.tar.gz b/libs/libqueue/dist/libqueue-0.4.1.tar.gz new file mode 100644 index 00000000..df179d6f Binary files /dev/null and b/libs/libqueue/dist/libqueue-0.4.1.tar.gz differ diff --git a/libs/libqueue/poetry.lock b/libs/libqueue/poetry.lock index 9a8d90c5..20f3b444 100644 --- a/libs/libqueue/poetry.lock +++ b/libs/libqueue/poetry.lock @@ -140,0 +141,18 @@ conda = ["pyyaml"] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + @@ -205,0 +224,17 @@ plugins = ["setuptools"] +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + @@ -261 +296 @@ description = "Core utilities for Python packages" -category = "dev" +category = "main" @@ -379 +414 @@ description = "pyparsing module - Classes and methods to define and execute pars -category = "dev" +category = "main" @@ -422,0 +458,11 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + @@ -574 +620 @@ python-versions = "3.9.6" -content-hash = "f1e5c2314c537ad7fe31443ebb167ebbb89dd978e1176f75798602de0616e9b1" +content-hash = "9e0fbfb54d61767cd7d6c92f871004acf033be695b75f6c262e9c758ab094c82" @@ -677,0 +724 @@ dparse = [] +environs = [] @@ -701,0 +749 @@ isort = [ +marshmallow = [] @@ -899,0 +948 @@ pytest-cov = [ +python-dotenv = [] diff --git a/libs/libqueue/pyproject.toml b/libs/libqueue/pyproject.toml index b148a1bb..931ee49f 100644 --- a/libs/libqueue/pyproject.toml +++ b/libs/libqueue/pyproject.toml @@ -5 +5 @@ name = "libqueue" -version = "0.3.2" +version = "0.4.1" @@ -8,0 +9 @@ license = "Apache-2.0" +environs = "^9.5.0" diff --git a/libs/libqueue/src/libqueue/config.py b/libs/libqueue/src/libqueue/config.py new file mode 100644 index 00000000..b10d6f12 --- /dev/null +++ b/libs/libqueue/src/libqueue/config.py @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. + +from environs import Env + + +class QueueConfig: + max_jobs_per_dataset: int + max_load_pct: int + max_memory_pct: int + mongo_database: str + mongo_url: str + sleep_seconds: int + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("QUEUE_"): + self.mongo_database = env.str(name="MONGO_DATABASE", default="datasets_server_queue") + self.mongo_url = env.str(name="MONGO_URL", default="mongodb://localhost:27017") + self.max_jobs_per_dataset = env.int(name="MAX_JOBS_PER_DATASET", default=1) + self.max_load_pct = env.int(name="MAX_LOAD_PCT", default=70) + self.max_memory_pct = env.int(name="MAX_MEMORY_PCT", default=80) + self.sleep_seconds = env.int(name="SLEEP_SECONDS", default=15) diff --git a/libs/libqueue/src/libqueue/constants.py b/libs/libqueue/src/libqueue/constants.py deleted file mode 100644 index aff12f95..00000000 --- a/libs/libqueue/src/libqueue/constants.py +++ /dev/null @@ -1,6 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -DEFAULT_MAX_LOAD_PCT: int = 70 -DEFAULT_MAX_MEMORY_PCT: int = 80 -DEFAULT_WORKER_SLEEP_SECONDS: int = 15 diff --git a/libs/libqueue/src/libqueue/worker.py b/libs/libqueue/src/libqueue/worker.py index e495dcf4..802eb866 100644 --- a/libs/libqueue/src/libqueue/worker.py +++ b/libs/libqueue/src/libqueue/worker.py @@ -11,0 +12 @@ from psutil import cpu_count, getloadavg, swap_memory, virtual_memory +from libqueue.config import QueueConfig @@ -14,6 +14,0 @@ from libqueue.queue import EmptyQueue, Queue -from .constants import ( - DEFAULT_MAX_LOAD_PCT, - DEFAULT_MAX_MEMORY_PCT, - DEFAULT_WORKER_SLEEP_SECONDS, -) - @@ -24,3 +19 @@ class Worker(ABC): - max_load_pct: int - max_memory_pct: int - sleep_seconds: int + queue_config: QueueConfig @@ -33,9 +26,2 @@ class Worker(ABC): - def __init__( - self, - max_load_pct: Optional[int] = None, - max_memory_pct: Optional[int] = None, - sleep_seconds: Optional[int] = None, - ) -> None: - self.max_load_pct = DEFAULT_MAX_LOAD_PCT if max_load_pct is None else max_load_pct - self.max_memory_pct = DEFAULT_MAX_MEMORY_PCT if max_memory_pct is None else max_memory_pct - self.sleep_seconds = DEFAULT_WORKER_SLEEP_SECONDS if sleep_seconds is None else sleep_seconds + def __init__(self, queue_config: QueueConfig) -> None: + self.queue_config = queue_config @@ -44 +30 @@ class Worker(ABC): - if self.max_memory_pct <= 0: + if self.queue_config.max_memory_pct <= 0: @@ -49 +35 @@ class Worker(ABC): - ok = percent < self.max_memory_pct + ok = percent < self.queue_config.max_memory_pct @@ -51 +37,3 @@ class Worker(ABC): - logger.info(f"memory usage (RAM + SWAP) is too high: {percent:.0f}% - max is {self.max_memory_pct}%") + logger.info( + f"memory usage (RAM + SWAP) is too high: {percent:.0f}% - max is {self.queue_config.max_memory_pct}%" + ) @@ -55 +43 @@ class Worker(ABC): - if self.max_load_pct <= 0: + if self.queue_config.max_load_pct <= 0: @@ -59 +47 @@ class Worker(ABC): - ok = load_pct < self.max_load_pct + ok = load_pct < self.queue_config.max_load_pct @@ -61 +49 @@ class Worker(ABC): - logger.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.max_load_pct}%") + logger.info(f"cpu load is too high: {load_pct:.0f}% - max is {self.queue_config.max_load_pct}%") @@ -67 +55 @@ class Worker(ABC): - duration = self.sleep_seconds * jitter + duration = self.queue_config.sleep_seconds * jitter diff --git a/libs/libqueue/tests/_utils.py b/libs/libqueue/tests/_utils.py deleted file mode 100644 index 7b725f1b..00000000 --- a/libs/libqueue/tests/_utils.py +++ /dev/null @@ -1,18 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -import os - -DEFAULT_MONGO_QUEUE_DATABASE: str = "datasets_server_queue_test" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27017" - - -def get_str_value(d: os._Environ[str], key: str, default: str) -> str: - if key not in d: - return default - value = str(d.get(key)).strip() - return value or default - - -MONGO_QUEUE_DATABASE = get_str_value(d=os.environ, key="MONGO_QUEUE_DATABASE", default=DEFAULT_MONGO_QUEUE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) diff --git a/libs/libqueue/tests/conftest.py b/libs/libqueue/tests/conftest.py new file mode 100644 index 00000000..4e8c8ff0 --- /dev/null +++ b/libs/libqueue/tests/conftest.py @@ -0,0 +1,23 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 The HuggingFace Authors. 
+ +from pytest import MonkeyPatch, fixture + +from libqueue.config import QueueConfig + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + yield monkeypatch_session + monkeypatch_session.undo() + + +@fixture(scope="session") +def queue_config(monkeypatch_session: MonkeyPatch) -> QueueConfig: + queue_config = QueueConfig() + if "test" not in queue_config.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return queue_config diff --git a/libs/libqueue/tests/test_queue.py b/libs/libqueue/tests/test_queue.py index f41bb879..73a3a17e 100644 --- a/libs/libqueue/tests/test_queue.py +++ b/libs/libqueue/tests/test_queue.py @@ -7,0 +8 @@ import pytest +from libqueue.config import QueueConfig @@ -18,8 +18,0 @@ from libqueue.queue import ( -from ._utils import MONGO_QUEUE_DATABASE, MONGO_URL - - [email protected](autouse=True, scope="module") -def safe_guard() -> None: - if "test" not in MONGO_QUEUE_DATABASE: - raise ValueError("Test must be launched on a test mongo database") - @@ -28,2 +21,2 @@ def safe_guard() -> None: -def client() -> None: - connect_to_queue(database=MONGO_QUEUE_DATABASE, host=MONGO_URL) +def client(queue_config: QueueConfig) -> None: + connect_to_queue(database=queue_config.mongo_database, host=queue_config.mongo_url) diff --git a/libs/libutils/README.md b/libs/libutils/README.md deleted file mode 100644 index f2710f48..00000000 --- a/libs/libutils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# libutils - -A Python library with common code (utils, logger, exceptions) used by the services. diff --git a/libs/libutils/src/libutils/utils.py b/libs/libutils/src/libutils/utils.py deleted file mode 100644 index d78993de..00000000 --- a/libs/libutils/src/libutils/utils.py +++ /dev/null @@ -1,64 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -import base64 -from distutils.util import strtobool -from os import _Environ -from typing import Any, Dict, List, Union - -import orjson -from starlette.datastructures import QueryParams - -GenericDict = Union[_Environ[str], QueryParams, Dict[str, Union[str, int, bool]]] - - -def get_bool_value(d: GenericDict, key: str, default: bool) -> bool: - if key not in d: - return default - try: - value = bool(strtobool(str(d.get(key)))) - except (TypeError, ValueError): - value = default - return value - - -def get_int_value(d: GenericDict, key: str, default: int) -> int: - v = d.get(key) - if v is None: - return default - try: - value = int(v) - except (TypeError, ValueError): - value = default - return value - - -def get_str_value(d: GenericDict, key: str, default: str) -> str: - if key not in d: - return default - value = str(d.get(key)).strip() - return value or default - - -def get_str_list_value(d: GenericDict, key: str, default: List[str]) -> List[str]: - if key not in d: - return default - return [el.strip() for el in str(d.get(key)).split(",") if len(el.strip())] - - -def get_str_or_none_value(d: GenericDict, key: str, default: Union[str, None]) -> Union[str, None]: - if key not in d: - return default - value = str(d.get(key)).strip() - return value or default - - -# orjson is used to get rid of errors with datetime (see allenai/c4) -def orjson_default(obj: Any) -> Any: - if isinstance(obj, bytes): - return base64.b64encode(obj).decode("utf-8") - raise TypeError - - -def orjson_dumps(content: Any) -> bytes: - return orjson.dumps(content, option=orjson.OPT_UTC_Z, default=orjson_default) diff --git a/libs/libutils/tests/test_utils.py b/libs/libutils/tests/test_utils.py deleted file mode 100644 index 8f1b5e76..00000000 --- a/libs/libutils/tests/test_utils.py +++ /dev/null @@ -1,64 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -from libutils.utils import ( - get_bool_value, - get_int_value, - get_str_list_value, - get_str_or_none_value, - get_str_value, -) - - -def test_get_bool_value() -> None: - assert get_bool_value({"KEY": "False"}, "KEY", True) is False - assert get_bool_value({"KEY": "True"}, "KEY", False) is True - assert get_bool_value({"KEY": "true"}, "KEY", False) is True - assert get_bool_value({"KEY": True}, "KEY", False) is True - assert get_bool_value({"KEY": "True"}, "DOESNOTEXIST", False) is False - assert get_bool_value({"KEY": ""}, "KEY", False) is False - assert get_bool_value({}, "KEY", False) is False - assert get_bool_value({}, "KEY", True) is True - - -def test_get_int_value() -> None: - default = 456 - assert get_int_value({"KEY": "123"}, "KEY", default) == 123 - assert get_int_value({"KEY": 123}, "KEY", default) == 123 - assert get_int_value({"KEY": "123"}, "DOESNOTEXIST", default) == default - assert get_int_value({"KEY": ""}, "KEY", default) == default - assert get_int_value({}, "KEY", default) == default - - -def test_get_str_value() -> None: - default = "string" - assert get_str_value({}, "KEY", default) == default - # Empty string is ignored - assert get_str_value({"KEY": ""}, "KEY", default) == default - assert get_str_value({"KEY": "test"}, "KEY", default) == "test" - assert get_str_value({"KEY": " test "}, "KEY", default) == "test" - assert get_str_value({"KEY": "None"}, "KEY", default) == "None" - assert get_str_value({"KEY": "test"}, "DOESNOTEXIST", default) == default - - -def test_get_str_list_value() -> None: - default = ["a", "b"] - assert get_str_list_value({}, "KEY", default) == default - # Empty string is NOT ignored - assert get_str_list_value({"KEY": ""}, "KEY", default) == [] - assert get_str_list_value({"KEY": "test"}, "KEY", default) == ["test"] - assert get_str_list_value({"KEY": "None"}, "KEY", default) == ["None"] - assert get_str_list_value({"KEY": "a,b,c"}, "KEY", default) == ["a", "b", "c"] - assert get_str_list_value({"KEY": "a , b, c "}, "KEY", default) == ["a", "b", "c"] - assert get_str_list_value({"KEY": "test"}, "DOESNOTEXIST", default) == default - - -def test_get_str_or_none_value() -> None: - default = "string" - assert get_str_or_none_value({}, "KEY", default) == default - # Empty string is ignored - assert get_str_or_none_value({"KEY": ""}, "KEY", default) == default - assert get_str_or_none_value({"KEY": "test"}, "KEY", default) == "test" - assert get_str_or_none_value({"KEY": "None"}, "KEY", default) == "None" - assert get_str_or_none_value({"KEY": "test"}, "DOESNOTEXIST", default) == default - assert get_str_or_none_value({}, "KEY", None) is None diff --git a/services/admin/Dockerfile b/services/admin/Dockerfile index 9a769d8c..1e20b620 100644 --- a/services/admin/Dockerfile +++ b/services/admin/Dockerfile @@ -26 +26 @@ COPY libs/libqueue/dist ./libs/libqueue/dist -COPY libs/libutils/dist ./libs/libutils/dist +COPY libs/libcommon/dist ./libs/libcommon/dist diff --git a/services/admin/Makefile b/services/admin/Makefile index 49eb6f3d..0d8ec96d 100644 --- a/services/admin/Makefile +++ b/services/admin/Makefile @@ -2,5 +2,4 @@ -export TEST_MONGO_PORT := 27030 -export TEST_MONGO_CACHE_DATABASE := datasets_server_cache_test -export TEST_MONGO_QUEUE_DATABASE := datasets_server_queue_test -export TEST_COMPOSE_PROJECT_NAME := admin -export TEST_HF_ENDPOINT := https://hub-ci.huggingface.co +export COMPOSE_PROJECT_NAME := admin +export MONGO_PORT := 27030 +export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} +export QUEUE_MONGO_URL := 
mongodb://localhost:${MONGO_PORT} @@ -8 +7 @@ export TEST_HF_ENDPOINT := https://hub-ci.huggingface.co -TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml +DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml @@ -10 +8,0 @@ TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml -# Ensure to specify HF_TOKEN when calling make test, ie HF_TOKEN=hf_app_xxx make test @@ -13 +10,0 @@ include ../../tools/Docker.mk -include ./Scripts.mk diff --git a/services/admin/README.md b/services/admin/README.md index cfd0c42d..6a7c2937 100644 --- a/services/admin/README.md +++ b/services/admin/README.md @@ -7,13 +7,31 @@ -Set environment variables to configure the following aspects: - -- `ASSETS_DIRECTORY`: directory where the asset files are stored. Defaults to empty, in which case the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. -- `CACHE_REPORTS_NUM_RESULTS`: the number of results in /cache-reports/... endpoints. Defaults to `100`. -- `HF_ENDPOINT`: URL of the HuggingFace Hub. Defaults to `https://huggingface.co`. -- `HF_ORGANIZATION`: the huggingface organization from which the authenticated user must be part of in order to access the protected routes, eg. "huggingface". If empty, the authentication is disabled. Defaults to None. -- `HF_WHOAMI_PATH`: the path of the external whoami service, on the hub (see `HF_ENDPOINT`), eg. "/api/whoami-v2". If empty, the authentication is disabled. Defaults to None. -- `LOG_LEVEL`: log level, among `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. Defaults to `INFO`. -- `MAX_AGE_SHORT_SECONDS`: number of seconds to set in the `max-age` header on technical endpoints. Defaults to `10` (10 seconds). -- `MONGO_CACHE_DATABASE`: the name of the database used for storing the cache. Defaults to `"datasets_server_cache"`. -- `MONGO_QUEUE_DATABASE`: the name of the database used for storing the queue. Defaults to `"datasets_server_queue"`. -- `MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. -- `PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker. +The service can be configured using environment variables. They are grouped by scope. + +### Admin service + +Set environment variables to configure the application (`ADMIN_` prefix): + +- `ADMIN_HF_ORGANIZATION`: the huggingface organization that the authenticated user must be part of in order to access the protected routes, eg. "huggingface". If empty, the authentication is disabled. Defaults to None. +- `ADMIN_CACHE_REPORTS_NUM_RESULTS`: the number of results in /cache-reports/... endpoints. Defaults to `100`. +- `ADMIN_HF_WHOAMI_PATH`: the path of the external whoami service, on the hub (see `HF_ENDPOINT`), eg. "/api/whoami-v2". Defaults to `/api/whoami-v2`. +- `ADMIN_MAX_AGE`: number of seconds to set in the `max-age` header on technical endpoints. Defaults to `10` (10 seconds). +- `ADMIN_PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker.
+ +### Uvicorn + +The following environment variables are used to configure the Uvicorn server (`ADMIN_UVICORN_` prefix): + +- `ADMIN_UVICORN_HOSTNAME`: the hostname. Defaults to `"localhost"`. +- `ADMIN_UVICORN_NUM_WORKERS`: the number of uvicorn workers. Defaults to `2`. +- `ADMIN_UVICORN_PORT`: the port. Defaults to `8000`. + +### Cache + +See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. + +### Queue + +See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. + +### Common + +See [../../libs/libcommon/README.md](../../libs/libcommon/README.md) for more information about the common configuration. diff --git a/services/admin/poetry.lock b/services/admin/poetry.lock index f9cc006c..b905b0db 100644 --- a/services/admin/poetry.lock +++ b/services/admin/poetry.lock @@ -39 +39 @@ name = "atomicwrites" -version = "1.4.0" +version = "1.4.1" @@ -47 +47 @@ name = "attrs" -version = "21.4.0" +version = "22.1.0" @@ -51 +51 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" @@ -54 +54 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] @@ -56,2 +56,2 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] @@ -80 +80 @@ name = "black" -version = "22.3.0" +version = "22.10.0" @@ -84 +84 @@ optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" @@ -91 +91 @@ platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} @@ -102 +102 @@ name = "certifi" -version = "2022.5.18.1" +version = "2022.9.24" @@ -110 +110 @@ name = "charset-normalizer" -version = "2.0.12" +version = "2.1.1" @@ -114 +114 @@ optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" @@ -132 +132 @@ name = "colorama" -version = "0.4.4" +version = "0.4.5" @@ -140 +140 @@ name = "coverage" -version = "6.4.1" +version = "6.5.0" @@ -176,0 +177,18 @@ conda = ["pyyaml"] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear 
(==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + @@ -179 +197 @@ name = "filelock" -version = "3.7.1" +version = "3.8.0" @@ -186,2 +204,2 @@ python-versions = ">=3.7" -docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] -testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] @@ -215 +233 @@ name = "gitpython" -version = "3.1.27" +version = "3.1.29" @@ -226 +244 @@ name = "h11" -version = "0.13.0" +version = "0.14.0" @@ -230 +248 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" @@ -259 +277 @@ name = "idna" -version = "3.3" +version = "3.4" @@ -289 +307 @@ name = "libcache" -version = "0.2.2" +version = "0.3.0" @@ -296,0 +315 @@ appdirs = ">=1.4.4,<2.0.0" +environs = ">=9.5.0,<10.0.0" @@ -303 +322 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.2.2-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" @@ -306 +325 @@ url = "../../libs/libcache/dist/libcache-0.2.2-py3-none-any.whl" -name = "libqueue" +name = "libcommon" @@ -307,0 +327,16 @@ version = "0.3.1" +description = "Library for utils, common to all the services and workers" +category = "main" +optional = false +python-versions = "==3.9.6" + +[package.dependencies] +environs = ">=9.5.0,<10.0.0" +orjson = ">=3.6.4,<4.0.0" + +[package.source] +type = "file" +url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" + +[[package]] +name = "libqueue" +version = "0.4.1" @@ -313,0 +349 @@ python-versions = "==3.9.6" +environs = ">=9.5.0,<10.0.0" @@ -315,0 +352 @@ mongoengine = ">=0.24.1,<0.25.0" +psutil = ">=5.9.2,<6.0.0" @@ -320 +357 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" @@ -323,3 +360,3 @@ url = "../../libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl" -name = "libutils" -version = "0.2.0" -description = "Library for utils" +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
@@ -328 +365 @@ optional = false -python-versions = "==3.9.6" +python-versions = ">=3.7" @@ -331,2 +368 @@ python-versions = "==3.9.6" -orjson = ">=3.6.4,<4.0.0" -starlette = ">=0.16.0,<0.17.0" +packaging = ">=17.0" @@ -334,3 +370,5 @@ starlette = ">=0.16.0,<0.17.0" -[package.source] -type = "file" -url = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl" +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] @@ -356 +394 @@ name = "mongoengine" -version = "0.24.1" +version = "0.24.2" @@ -391 +429 @@ name = "orjson" -version = "3.7.2" +version = "3.8.0" @@ -410 +448 @@ name = "pathspec" -version = "0.9.0" +version = "0.10.1" @@ -414 +452 @@ optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" @@ -418 +456 @@ name = "pbr" -version = "5.9.0" +version = "5.10.0" @@ -469,0 +508,11 @@ twisted = ["twisted"] +[[package]] +name = "psutil" +version = "5.9.2" +description = "Cross-platform lib for process and system monitoring in Python." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] + @@ -562,0 +612,11 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + @@ -573 +633 @@ name = "requests" -version = "2.28.0" +version = "2.28.1" @@ -581 +641 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2.0.0,<2.1.0" +charset-normalizer = ">=2,<3" @@ -587 +647 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] @@ -656 +716 @@ name = "sniffio" -version = "1.2.0" +version = "1.3.0" @@ -660 +720 @@ optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" @@ -690 +750 @@ name = "stevedore" -version = "3.5.0" +version = "4.0.1" @@ -694 +754 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" @@ -725 +785 @@ name = "tqdm" -version = "4.64.0" +version = "4.64.1" @@ -750 +810 @@ name = "typing-extensions" -version = "4.2.0" +version = "4.4.0" @@ -758 +818 @@ name = "urllib3" -version = "1.26.9" +version = "1.26.12" @@ -762 +822 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" @@ -766 +826 @@ brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] @@ -802 +862 @@ python-versions = "3.9.6" -content-hash = "2ed379b24a717b5a2d9b0c00d246302bd3ab4f4cbb18b342cd6c0bc6d9597981" +content-hash = "84c8c8207b78423217d0bded3a27686d73ba83c7797afce314e96214e4dbd3e0" @@ -817,8 
+877,2 @@ asgiref = [ -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] +atomicwrites = [] +attrs = [] @@ -829,33 +883,3 @@ bandit = [ -black = [ - {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, - {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, - {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, - {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, - {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, - {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, - {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, - {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, - {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, - {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, - {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, - {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, - {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, - {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, - {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, - {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, - {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, - {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, - {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, -] -certifi = [ - {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, - {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] +black = [] +certifi = [] +charset-normalizer = [] @@ -867,45 +891,2 @@ colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -coverage = [ - {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, - {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, - {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, - {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, - {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, - {file = 
"coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, - {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, - {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, - {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, - {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, - {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, - {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, - {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, - {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, @@ -912,0 +894 @@ coverage = [ +coverage = [] @@ -918,4 +900,2 @@ dparse = [] -filelock = [ - {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, - {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, -] +environs = [] +filelock = [] @@ -930,8 +910,2 @@ gitdb = [ -gitpython = [ - {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, - {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, -] -h11 = [ - {file = "h11-0.13.0-py3-none-any.whl", hash = "sha256:8ddd78563b633ca55346c8cd41ec0af27d3c79931828beffb46ce70a379e7442"}, - {file = "h11-0.13.0.tar.gz", hash = "sha256:70813c1135087a248a4d38cc0e1a0181ffab2188141a93eaf567940c3957ff06"}, -] +gitpython = [] +h11 = [] @@ -939,4 +913 @@ huggingface-hub = [] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] +idna = [] @@ -952 +923 @@ libcache = [ - {file = "libcache-0.2.2-py3-none-any.whl", hash = "sha256:682aecaedf5782d7048b0ab0fed7bdd1e399cd06dbe2a3db31d1b3951a328559"}, + {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, @@ -954,2 +925,2 @@ libcache = [ -libqueue = [ - {file = "libqueue-0.3.1-py3-none-any.whl", hash = "sha256:bc8be8f0ffe1d82260269ce21f9814d3b083ac175f0eb6200ca84a791e427a5d"}, +libcommon = [ + {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, @@ -957,2 +928,2 @@ libqueue = [ -libutils = [ - {file = "libutils-0.2.0-py3-none-any.whl", hash = "sha256:a562dd39d4b3c5ab20bb11354e8eaf582d873f0367996df9a4c3c00609f608da"}, +libqueue = [ + {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, @@ -959,0 +931 @@ libutils = [ +marshmallow = [] @@ -968,4 +940 @@ mongo-types = [ -mongoengine = [ - {file = "mongoengine-0.24.1-py3-none-any.whl", hash = "sha256:68878b65bcb3751debcba4342180a180161cdb5f46525027e622ad081dd44fac"}, - {file = "mongoengine-0.24.1.tar.gz", hash = "sha256:01baac85f408f5eefb6195c0afeae631e7fc6fab5cb221a7b46646f94227d6da"}, -] +mongoengine = [] @@ -1000,35 +969 @@ 
mypy-extensions = [ -orjson = [ - {file = "orjson-3.7.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:4c6bdb0a7dfe53cca965a40371c7b8e72a0441c8bc4949c9015600f1c7fae408"}, - {file = "orjson-3.7.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6e6fc60775bb0a050846710c4a110e8ad17f41e443ff9d0d05145d8f3a74b577"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4b70bb1f746a9c9afb1f861a0496920b5833ff06f9d1b25b6a7d292cb7e8a06"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99bb2127ee174dd6e68255db26dbef0bd6c4330377a17867ecfa314d47bfac82"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:26306d988401cc34ac94dd38873b8c0384276a5ad80cdf50e266e06083284975"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:34a67d810dbcec77d00d764ab730c5bbb0bee1d75a037c8d8e981506e8fba560"}, - {file = "orjson-3.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14bc727f41ce0dd93d1a6a9fc06076e2401e71b00d0bf107bf64d88d2d963b77"}, - {file = "orjson-3.7.2-cp310-none-win_amd64.whl", hash = "sha256:4c686cbb73ccce02929dd799427897f0a0b2dd597d2f5b6b434917ecc3774146"}, - {file = "orjson-3.7.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:12eb683ddbdddd6847ca2b3b074f42574afc0fbf1aff33d8fdf3a4329167762a"}, - {file = "orjson-3.7.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:993550e6e451a2b71435142d4824a09f8db80d497abae23dc9f3fe62b6ca24c0"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54cfa4d915a98209366dcf500ee5c3f66408cc9e2b4fd777c8508f69a8f519a1"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f735999d49e2fff2c9812f1ea330b368349f77726894e2a06d17371e61d771bb"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:b2b660790b0804624c569ddb8ca9d31bac6f94f880fd54b8cdff4198735a9fec"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:590bc5f33e54eb2261de65e4026876e57d04437bab8dcade9514557e31d84537"}, - {file = "orjson-3.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8ac61c5c98cbcdcf7a3d0a4b62c873bbd9a996a69eaa44f8356a9e10aa29ef49"}, - {file = "orjson-3.7.2-cp37-none-win_amd64.whl", hash = "sha256:662bda15edf4d25d520945660873e730e3a6d9975041ba9c32f0ce93b632ee0d"}, - {file = "orjson-3.7.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:19eb800811a53efc7111ff7536079fb2f62da7098df0a42756ba91e7bdd01aff"}, - {file = "orjson-3.7.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:54a1e4e39c89d37d3dbc74dde36d09eebcde365ec6803431af9c86604bbbaf3a"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbd3b46ac514cbe29ecebcee3882383022acf84aa4d3338f26d068c6fbdf56a0"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891640d332c8c7a1478ea6d13b676d239dc86451afa46000c4e8d0990a0d72dd"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:9778a7ec4c72d6814f1e116591f351404a4df2e1dc52d282ff678781f45b509b"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b0b2483f8ad1f93ae4aa43bcf6a985e6ec278e931d0118bae605ffd811b614a1"}, - {file = "orjson-3.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:2d90ca4e74750c7adfb7708deb096f835f7e6c4b892bdf703fe871565bb04ad7"}, - {file = "orjson-3.7.2-cp38-none-win_amd64.whl", hash = "sha256:b0f4e92bdfe86a0da57028e669bc1f50f48d810ef6f661e63dc6593c450314bf"}, - {file = "orjson-3.7.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:b705132b2827d33291684067cca6baa451a499b459e46761d30fcf4d6ce21a9a"}, - {file = "orjson-3.7.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:c589d00b4fb0777f222b35925e4fa030c4777f16d1623669f44bdc191570be66"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e197e6779b230e74333e06db804ff876b27306470f68692ec70c27310e7366f"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a82089ec9e1f7e9b992ff5ab98b4c3c2f98e7bbfdc6fadbef046c5aaafec2b54"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3ff49c219b30d715c8baae17c7c5839fe3f2c2db10a66c61d6b91bda80bf8789"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:299a743576aaa04f5c7994010608f96df5d4a924d584a686c6e263cee732cb00"}, - {file = "orjson-3.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3ae3ed52c875ce1a6c607f852ca177057445289895483b0247f0dc57b481241"}, - {file = "orjson-3.7.2-cp39-none-win_amd64.whl", hash = "sha256:796914f7463277d371402775536fb461948c0d34a67d20a57dc4ec49a48a8613"}, - {file = "orjson-3.7.2.tar.gz", hash = "sha256:1cf9690a0b7c51a988221376741a31087bc1dc2ac327bb2dde919806dfa59444"}, -] +orjson = [] @@ -1039,8 +974,2 @@ packaging = [ -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -pbr = [ - {file = "pbr-5.9.0-py2.py3-none-any.whl", hash = "sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a"}, - {file = "pbr-5.9.0.tar.gz", hash = "sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"}, -] +pathspec = [] +pbr = [] @@ -1062,0 +992 @@ prometheus-client = [ +psutil = [] @@ -1195,0 +1126 @@ pytest-cov = [ +python-dotenv = [] @@ -1232,2 +1163,2 @@ requests = [ - {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"}, - {file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"}, + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, @@ -1243,4 +1174 @@ smmap = [ -sniffio = [ - {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, - {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, -] +sniffio = [] @@ -1255,4 +1183 @@ starlette-prometheus = [ -stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, -] +stevedore = [] @@ -1271,4 +1196 @@ tomlkit = [ -tqdm = [ - {file = "tqdm-4.64.0-py2.py3-none-any.whl", hash = 
"sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6"}, - {file = "tqdm-4.64.0.tar.gz", hash = "sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d"}, -] +tqdm = [] @@ -1307,8 +1229,2 @@ typed-ast = [ -typing-extensions = [ - {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, - {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, -] -urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, -] +typing-extensions = [] +urllib3 = [] diff --git a/services/admin/pyproject.toml b/services/admin/pyproject.toml index eead82b4..2833d178 100644 --- a/services/admin/pyproject.toml +++ b/services/admin/pyproject.toml @@ -8,0 +9 @@ license = "Apache-2.0" +environs = "^9.5.0" @@ -10,3 +11,3 @@ huggingface-hub = "^0.8.1" -libcache = { path = "../../libs/libcache/dist/libcache-0.2.2-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl", develop = false } -libutils = { path = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } diff --git a/services/admin/src/admin/app.py b/services/admin/src/admin/app.py index 8e4aa7ad..7b5828b1 100644 --- a/services/admin/src/admin/app.py +++ b/services/admin/src/admin/app.py @@ -5,0 +6 @@ from libcache.simple_cache import connect_to_cache +from libcommon.logger import init_logger @@ -7 +7,0 @@ from libqueue.queue import connect_to_queue -from libutils.logger import init_logger @@ -15,11 +15 @@ from starlette_prometheus import PrometheusMiddleware -from admin.config import ( - APP_HOSTNAME, - APP_NUM_WORKERS, - APP_PORT, - EXTERNAL_AUTH_URL, - HF_ORGANIZATION, - LOG_LEVEL, - MONGO_CACHE_DATABASE, - MONGO_QUEUE_DATABASE, - MONGO_URL, -) +from admin.config import AppConfig, UvicornConfig @@ -33,4 +23,5 @@ def create_app() -> Starlette: - init_logger(log_level=LOG_LEVEL) - connect_to_cache(database=MONGO_CACHE_DATABASE, host=MONGO_URL) - connect_to_queue(database=MONGO_QUEUE_DATABASE, host=MONGO_URL) - prometheus = Prometheus() + app_config = AppConfig() + init_logger(app_config.common.log_level) + connect_to_cache(database=app_config.cache.mongo_database, host=app_config.cache.mongo_url) + connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) + prometheus = Prometheus(prometheus_multiproc_dir=app_config.admin.prometheus_multiproc_dir) @@ -51 +42,7 @@ def create_app() -> Starlette: - endpoint=create_cache_reports_endpoint("features", EXTERNAL_AUTH_URL, HF_ORGANIZATION), + endpoint=create_cache_reports_endpoint( + endpoint="features", + cache_reports_num_results=app_config.admin.cache_reports_num_results, + max_age=app_config.admin.max_age, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), @@ -55 +52,7 @@ def create_app() -> Starlette: - endpoint=create_cache_reports_endpoint("first-rows", 
EXTERNAL_AUTH_URL, HF_ORGANIZATION), + endpoint=create_cache_reports_endpoint( + endpoint="first-rows", + cache_reports_num_results=app_config.admin.cache_reports_num_results, + max_age=app_config.admin.max_age, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), @@ -59 +62,7 @@ def create_app() -> Starlette: - endpoint=create_cache_reports_endpoint("splits", EXTERNAL_AUTH_URL, HF_ORGANIZATION), + endpoint=create_cache_reports_endpoint( + endpoint="splits", + cache_reports_num_results=app_config.admin.cache_reports_num_results, + max_age=app_config.admin.max_age, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), @@ -62 +71,8 @@ def create_app() -> Starlette: - Route("/pending-jobs", endpoint=create_pending_jobs_endpoint(EXTERNAL_AUTH_URL, HF_ORGANIZATION)), + Route( + "/pending-jobs", + endpoint=create_pending_jobs_endpoint( + max_age=app_config.admin.max_age, + external_auth_url=app_config.admin.external_auth_url, + organization=app_config.admin.hf_organization, + ), + ), @@ -68 +84,8 @@ def start() -> None: - uvicorn.run("app:create_app", host=APP_HOSTNAME, port=APP_PORT, factory=True, workers=APP_NUM_WORKERS) + uvicorn_config = UvicornConfig() + uvicorn.run( + "app:create_app", + host=uvicorn_config.hostname, + port=uvicorn_config.port, + factory=True, + workers=uvicorn_config.num_workers, + ) diff --git a/services/admin/src/admin/authentication.py b/services/admin/src/admin/authentication.py index 48b685aa..6218b52c 100644 --- a/services/admin/src/admin/authentication.py +++ b/services/admin/src/admin/authentication.py @@ -40 +40,2 @@ def auth_check( - organization (str | None): the organization name + organization (str | None): the organization name. If None, the dataset is always + authorized. 
@@ -45 +46 @@ def auth_check( - if external_auth_url is None: + if organization is None or external_auth_url is None: diff --git a/services/admin/src/admin/config.py b/services/admin/src/admin/config.py index 8c2d5037..ad5b6a4f 100644 --- a/services/admin/src/admin/config.py +++ b/services/admin/src/admin/config.py @@ -4,37 +4,53 @@ -import os - -from libutils.utils import get_int_value, get_str_or_none_value, get_str_value - -from admin.constants import ( - DEFAULT_APP_HOSTNAME, - DEFAULT_APP_NUM_WORKERS, - DEFAULT_APP_PORT, - DEFAULT_ASSETS_DIRECTORY, - DEFAULT_CACHE_REPORTS_NUM_RESULTS, - DEFAULT_HF_ENDPOINT, - DEFAULT_HF_ORGANIZATION, - DEFAULT_HF_WHOAMI_PATH, - DEFAULT_LOG_LEVEL, - DEFAULT_MAX_AGE_SHORT_SECONDS, - DEFAULT_MONGO_CACHE_DATABASE, - DEFAULT_MONGO_QUEUE_DATABASE, - DEFAULT_MONGO_URL, -) - -APP_HOSTNAME = get_str_value(d=os.environ, key="APP_HOSTNAME", default=DEFAULT_APP_HOSTNAME) -APP_NUM_WORKERS = get_int_value(d=os.environ, key="APP_NUM_WORKERS", default=DEFAULT_APP_NUM_WORKERS) -APP_PORT = get_int_value(d=os.environ, key="APP_PORT", default=DEFAULT_APP_PORT) -ASSETS_DIRECTORY = get_str_or_none_value(d=os.environ, key="ASSETS_DIRECTORY", default=DEFAULT_ASSETS_DIRECTORY) -CACHE_REPORTS_NUM_RESULTS = get_int_value( - d=os.environ, key="CACHE_REPORTS_NUM_RESULTS", default=DEFAULT_CACHE_REPORTS_NUM_RESULTS -) -HF_ENDPOINT = get_str_value(d=os.environ, key="HF_ENDPOINT", default=DEFAULT_HF_ENDPOINT) -HF_ORGANIZATION = get_str_or_none_value(d=os.environ, key="HF_ORGANIZATION", default=DEFAULT_HF_ORGANIZATION) -HF_WHOAMI_PATH = get_str_or_none_value(d=os.environ, key="HF_WHOAMI_PATH", default=DEFAULT_HF_WHOAMI_PATH) -LOG_LEVEL = get_str_value(d=os.environ, key="LOG_LEVEL", default=DEFAULT_LOG_LEVEL) -MAX_AGE_SHORT_SECONDS = get_int_value(d=os.environ, key="MAX_AGE_SHORT_SECONDS", default=DEFAULT_MAX_AGE_SHORT_SECONDS) -MONGO_CACHE_DATABASE = get_str_value(d=os.environ, key="MONGO_CACHE_DATABASE", default=DEFAULT_MONGO_CACHE_DATABASE) -MONGO_QUEUE_DATABASE = get_str_value(d=os.environ, key="MONGO_QUEUE_DATABASE", default=DEFAULT_MONGO_QUEUE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) - -EXTERNAL_AUTH_URL = None if HF_WHOAMI_PATH is None else f"{HF_ENDPOINT}{HF_WHOAMI_PATH}" +from typing import Optional + +from environs import Env +from libcache.config import CacheConfig +from libcommon.config import CommonConfig +from libqueue.config import QueueConfig + + +class UvicornConfig: + hostname: str + num_workers: int + port: int + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("ADMIN_UVICORN_"): + self.hostname = env.str(name="HOSTNAME", default="localhost") + self.num_workers = env.int(name="NUM_WORKERS", default=2) + self.port = env.int(name="PORT", default=8000) + + +class AdminConfig: + cache_reports_num_results: int + external_auth_url: str + hf_organization: Optional[str] + hf_whoami_path: str + max_age: int + prometheus_multiproc_dir: Optional[str] + + def __init__(self, hf_endpoint: str): + env = Env(expand_vars=True) + with env.prefixed("ADMIN_"): + hf_organization = env.str(name="HF_ORGANIZATION", default="") + self.hf_organization = None if hf_organization == "" else hf_organization + self.cache_reports_num_results = env.int(name="CACHE_REPORTS_NUM_RESULTS", default=100) + self.hf_whoami_path = env.str(name="HF_WHOAMI_PATH", default="/api/whoami-v2") + self.max_age = env.int(name="MAX_AGE", default=10) # 10 seconds + prometheus_multiproc_dir = env.str(name="PROMETHEUS_MULTIPROC_DIR", default="") 
+ self.prometheus_multiproc_dir = None if prometheus_multiproc_dir == "" else prometheus_multiproc_dir + self.external_auth_url = None if self.hf_whoami_path is None else f"{hf_endpoint}{self.hf_whoami_path}" + + +class AppConfig: + admin: AdminConfig + cache: CacheConfig + common: CommonConfig + queue: QueueConfig + + def __init__(self): + self.cache = CacheConfig() + self.common = CommonConfig() + self.queue = QueueConfig() + self.admin = AdminConfig(hf_endpoint=self.common.hf_endpoint) diff --git a/services/admin/src/admin/constants.py b/services/admin/src/admin/constants.py deleted file mode 100644 index a04f0945..00000000 --- a/services/admin/src/admin/constants.py +++ /dev/null @@ -1,16 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -DEFAULT_APP_HOSTNAME: str = "localhost" -DEFAULT_APP_NUM_WORKERS: int = 2 -DEFAULT_APP_PORT: int = 8000 -DEFAULT_ASSETS_DIRECTORY: None = None -DEFAULT_CACHE_REPORTS_NUM_RESULTS: int = 100 -DEFAULT_HF_ENDPOINT: str = "https://huggingface.co" -DEFAULT_HF_ORGANIZATION: None = None -DEFAULT_HF_WHOAMI_PATH: None = None -DEFAULT_LOG_LEVEL: str = "INFO" -DEFAULT_MAX_AGE_SHORT_SECONDS: int = 10 # 10 seconds -DEFAULT_MONGO_CACHE_DATABASE: str = "datasets_server_cache" -DEFAULT_MONGO_QUEUE_DATABASE: str = "datasets_server_queue" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27017" diff --git a/services/admin/src/admin/prometheus.py b/services/admin/src/admin/prometheus.py index 635de063..0dcac720 100644 --- a/services/admin/src/admin/prometheus.py +++ b/services/admin/src/admin/prometheus.py @@ -4,2 +4 @@ -import os -from typing import Dict +from typing import Dict, Optional @@ -28,0 +28 @@ class Prometheus: + first_rows_queue: Queue @@ -29,0 +30,2 @@ class Prometheus: + split_queue: Queue + prometheus_multiproc_dir: Optional[str] @@ -31 +33,2 @@ class Prometheus: - def __init__(self): + def __init__(self, prometheus_multiproc_dir: Optional[str]): + self.prometheus_multiproc_dir = prometheus_multiproc_dir @@ -38 +41 @@ class Prometheus: - if "PROMETHEUS_MULTIPROC_DIR" in os.environ: + if self.prometheus_multiproc_dir is not None: @@ -40 +43 @@ class Prometheus: - MultiProcessCollector(registry) + MultiProcessCollector(registry=registry, path=self.prometheus_multiproc_dir) diff --git a/services/admin/src/admin/routes/cache_reports.py b/services/admin/src/admin/routes/cache_reports.py index 94c7d8e3..3737eeda 100644 --- a/services/admin/src/admin/routes/cache_reports.py +++ b/services/admin/src/admin/routes/cache_reports.py @@ -18 +17,0 @@ from admin.authentication import auth_check -from admin.config import CACHE_REPORTS_NUM_RESULTS @@ -42 +41,5 @@ def create_cache_reports_endpoint( - endpoint: EndpointName, external_auth_url: Optional[str] = None, organization: Optional[str] = None + endpoint: EndpointName, + cache_reports_num_results: int, + max_age: int, + external_auth_url: Optional[str] = None, + organization: Optional[str] = None, @@ -53 +56,4 @@ def create_cache_reports_endpoint( - return get_json_ok_response(get_cache_reports(cursor, CACHE_REPORTS_NUM_RESULTS)) + return get_json_ok_response( + get_cache_reports(cursor=cursor, limit=cache_reports_num_results), + max_age=max_age, + ) @@ -61 +67 @@ def create_cache_reports_endpoint( - return get_json_admin_error_response(e) + return get_json_admin_error_response(e, max_age=max_age) @@ -63 +69 @@ def create_cache_reports_endpoint( - return get_json_admin_error_response(UnexpectedError("Unexpected error.")) + return 
get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=max_age) diff --git a/services/admin/src/admin/routes/pending_jobs.py b/services/admin/src/admin/routes/pending_jobs.py index 63024b6e..cba8a573 100644 --- a/services/admin/src/admin/routes/pending_jobs.py +++ b/services/admin/src/admin/routes/pending_jobs.py @@ -25 +25 @@ def create_pending_jobs_endpoint( - external_auth_url: Optional[str] = None, organization: Optional[str] = None + max_age: int, external_auth_url: Optional[str] = None, organization: Optional[str] = None @@ -39 +39,2 @@ def create_pending_jobs_endpoint( - } + }, + max_age=max_age, @@ -42 +43 @@ def create_pending_jobs_endpoint( - return get_json_admin_error_response(e) + return get_json_admin_error_response(e, max_age=max_age) @@ -44 +45 @@ def create_pending_jobs_endpoint( - return get_json_admin_error_response(UnexpectedError("Unexpected error.")) + return get_json_admin_error_response(UnexpectedError("Unexpected error."), max_age=max_age) diff --git a/services/admin/src/admin/scripts/cancel_jobs_first_rows.py b/services/admin/src/admin/scripts/cancel_jobs_first_rows.py index 28daa229..b8b844fb 100644 --- a/services/admin/src/admin/scripts/cancel_jobs_first_rows.py +++ b/services/admin/src/admin/scripts/cancel_jobs_first_rows.py @@ -5,0 +6 @@ import logging +from libcommon.logger import init_logger @@ -7 +7,0 @@ from libqueue.queue import Queue, connect_to_queue -from libutils.logger import init_logger @@ -9 +9 @@ from libutils.logger import init_logger -from admin.config import LOG_LEVEL, MONGO_QUEUE_DATABASE, MONGO_URL +from admin.config import AppConfig @@ -13 +13,2 @@ if __name__ == "__main__": - init_logger(LOG_LEVEL, "cancel_jobs_first_rows") + app_config = AppConfig() + init_logger(app_config.common.log_level, "cancel_jobs_first_rows") @@ -15 +16 @@ if __name__ == "__main__": - connect_to_queue(MONGO_QUEUE_DATABASE, MONGO_URL) + connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) diff --git a/services/admin/src/admin/scripts/cancel_jobs_splits.py b/services/admin/src/admin/scripts/cancel_jobs_splits.py index 5b83a1d9..84781418 100644 --- a/services/admin/src/admin/scripts/cancel_jobs_splits.py +++ b/services/admin/src/admin/scripts/cancel_jobs_splits.py @@ -5,0 +6 @@ import logging +from libcommon.logger import init_logger @@ -7 +7,0 @@ from libqueue.queue import Queue, connect_to_queue -from libutils.logger import init_logger @@ -9 +9 @@ from libutils.logger import init_logger -from admin.config import LOG_LEVEL, MONGO_QUEUE_DATABASE, MONGO_URL +from admin.config import AppConfig @@ -13 +13,2 @@ if __name__ == "__main__": - init_logger(LOG_LEVEL, "cancel_jobs_splits") + app_config = AppConfig() + init_logger(app_config.common.log_level, "cancel_jobs_splits") @@ -15 +16 @@ if __name__ == "__main__": - connect_to_queue(MONGO_QUEUE_DATABASE, MONGO_URL) + connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) diff --git a/services/admin/src/admin/scripts/refresh_cache.py b/services/admin/src/admin/scripts/refresh_cache.py index ad196f97..ff15aff1 100644 --- a/services/admin/src/admin/scripts/refresh_cache.py +++ b/services/admin/src/admin/scripts/refresh_cache.py @@ -7,0 +8 @@ from huggingface_hub.hf_api import HfApi # type: ignore +from libcommon.logger import init_logger @@ -9 +9,0 @@ from libqueue.queue import Queue, connect_to_queue -from libutils.logger import init_logger @@ -11 +11 @@ from libutils.logger import init_logger -from admin.config import 
HF_ENDPOINT, LOG_LEVEL, MONGO_QUEUE_DATABASE, MONGO_URL +from admin.config import AppConfig @@ -15,2 +15,2 @@ from admin.utils import JobType -def get_hf_dataset_names(): - return [str(dataset.id) for dataset in HfApi(HF_ENDPOINT).list_datasets(full=False)] +def get_hf_dataset_names(hf_endpoint: str): + return [str(dataset.id) for dataset in HfApi(hf_endpoint).list_datasets(full=False)] @@ -22 +21,0 @@ def refresh_datasets_cache(dataset_names: List[str]) -> None: - connect_to_queue(MONGO_QUEUE_DATABASE, MONGO_URL) @@ -30 +29,2 @@ if __name__ == "__main__": - init_logger(LOG_LEVEL, "refresh_cache") + app_config = AppConfig() + init_logger(app_config.common.log_level, "refresh_cache") @@ -32 +32,2 @@ if __name__ == "__main__": - refresh_datasets_cache(get_hf_dataset_names()) + connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) + refresh_datasets_cache(get_hf_dataset_names(hf_endpoint=app_config.common.hf_endpoint)) diff --git a/services/admin/src/admin/scripts/refresh_cache_canonical.py b/services/admin/src/admin/scripts/refresh_cache_canonical.py index 3548236b..2283a0b1 100644 --- a/services/admin/src/admin/scripts/refresh_cache_canonical.py +++ b/services/admin/src/admin/scripts/refresh_cache_canonical.py @@ -7 +7,2 @@ from huggingface_hub.hf_api import HfApi # type: ignore -from libutils.logger import init_logger +from libcommon.logger import init_logger +from libqueue.queue import connect_to_queue @@ -9 +10 @@ from libutils.logger import init_logger -from admin.config import HF_ENDPOINT, LOG_LEVEL +from admin.config import AppConfig @@ -13,2 +14,2 @@ from admin.scripts.refresh_cache import refresh_datasets_cache -def get_hf_canonical_dataset_names(): - return [str(dataset.id) for dataset in HfApi(HF_ENDPOINT).list_datasets(full=False) if dataset.id.find("/") == -1] +def get_hf_canonical_dataset_names(hf_endpoint: str): + return [str(dataset.id) for dataset in HfApi(hf_endpoint).list_datasets(full=False) if dataset.id.find("/") == -1] @@ -18 +19,2 @@ if __name__ == "__main__": - init_logger(LOG_LEVEL, "refresh_cache_canonical") + app_config = AppConfig() + init_logger(app_config.common.log_level, "refresh_cache_canonical") @@ -20 +22,2 @@ if __name__ == "__main__": - refresh_datasets_cache(get_hf_canonical_dataset_names()) + connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) + refresh_datasets_cache(get_hf_canonical_dataset_names(hf_endpoint=app_config.common.hf_endpoint)) diff --git a/services/admin/src/admin/scripts/refresh_cache_errors.py b/services/admin/src/admin/scripts/refresh_cache_errors.py index 5ababb59..ae5f3a26 100644 --- a/services/admin/src/admin/scripts/refresh_cache_errors.py +++ b/services/admin/src/admin/scripts/refresh_cache_errors.py @@ -7 +7,2 @@ from libcache.simple_cache import connect_to_cache, get_datasets_with_some_error -from libutils.logger import init_logger +from libcommon.logger import init_logger +from libqueue.queue import connect_to_queue @@ -9 +10 @@ from libutils.logger import init_logger -from admin.config import LOG_LEVEL, MONGO_CACHE_DATABASE, MONGO_URL +from admin.config import AppConfig @@ -13 +14,2 @@ if __name__ == "__main__": - init_logger(LOG_LEVEL, "refresh_cache_canonical") + app_config = AppConfig() + init_logger(app_config.common.log_level, "refresh_cache_errors") @@ -15 +17,2 @@ if __name__ == "__main__": - connect_to_cache(MONGO_CACHE_DATABASE, MONGO_URL) + connect_to_cache(database=app_config.cache.mongo_database, host=app_config.cache.mongo_url) +
connect_to_queue(database=app_config.queue.mongo_database, host=app_config.cache.mongo_url) diff --git a/services/admin/src/admin/utils.py b/services/admin/src/admin/utils.py index 61672e0e..c9e7a058 100644 --- a/services/admin/src/admin/utils.py +++ b/services/admin/src/admin/utils.py @@ -8,2 +8,2 @@ from typing import Any, Callable, Coroutine, Literal, Optional -from libutils.exceptions import CustomError -from libutils.utils import orjson_dumps +from libcommon.exceptions import CustomError +from libcommon.utils import orjson_dumps @@ -13,2 +12,0 @@ from starlette.responses import JSONResponse, Response -from admin.config import MAX_AGE_SHORT_SECONDS - @@ -81,2 +79,2 @@ def get_json_response( -def get_json_ok_response(content: Any) -> Response: - return get_json_response(content, max_age=MAX_AGE_SHORT_SECONDS) +def get_json_ok_response(content: Any, max_age: int) -> Response: + return get_json_response(content=content, max_age=max_age) @@ -86 +84 @@ def get_json_error_response( - content: Any, status_code: HTTPStatus = HTTPStatus.OK, error_code: Optional[str] = None + content: Any, max_age: int, status_code: HTTPStatus = HTTPStatus.OK, error_code: Optional[str] = None @@ -88 +86 @@ def get_json_error_response( - return get_json_response(content, status_code=status_code, max_age=MAX_AGE_SHORT_SECONDS, error_code=error_code) + return get_json_response(content=content, status_code=status_code, max_age=max_age, error_code=error_code) @@ -91,2 +89,4 @@ def get_json_error_response( -def get_json_admin_error_response(error: AdminCustomError) -> Response: - return get_json_error_response(error.as_response(), error.status_code, error.code) +def get_json_admin_error_response(error: AdminCustomError, max_age: int) -> Response: + return get_json_error_response( + content=error.as_response(), status_code=error.status_code, max_age=max_age, error_code=error.code + ) diff --git a/services/admin/tests/conftest.py b/services/admin/tests/conftest.py index 7cb3ac81..188a62a5 100644 --- a/services/admin/tests/conftest.py +++ b/services/admin/tests/conftest.py @@ -3,0 +4,4 @@ +from pytest import MonkeyPatch, fixture + +from admin.config import AppConfig + @@ -5,0 +10,20 @@ pytest_plugins = ["tests.fixtures.hub"] + + +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(hf_endpoint: str, hf_token: str): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + monkeypatch_session.setenv("COMMON_HF_ENDPOINT", hf_endpoint) + monkeypatch_session.setenv("COMMON_HF_TOKEN", hf_token) + yield monkeypatch_session + monkeypatch_session.undo() + + +@fixture(scope="session") +def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: + app_config = AppConfig() + if "test" not in app_config.cache.mongo_database or "test" not in app_config.queue.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return app_config diff --git a/services/admin/tests/fixtures/hub.py b/services/admin/tests/fixtures/hub.py index d96dc435..dd960c1e 100644 --- a/services/admin/tests/fixtures/hub.py +++ b/services/admin/tests/fixtures/hub.py @@ -102,0 +103,5 @@ def hf_api(): [email protected](scope="session") +def hf_endpoint() -> str: + return CI_HUB_ENDPOINT + + diff --git a/services/admin/tests/scripts/test_refresh_cache_canonical.py 
b/services/admin/tests/scripts/test_refresh_cache_canonical.py index 9d76621d..adedd56b 100644 --- a/services/admin/tests/scripts/test_refresh_cache_canonical.py +++ b/services/admin/tests/scripts/test_refresh_cache_canonical.py @@ -3,0 +4 @@ +from admin.config import AppConfig @@ -9,2 +10,2 @@ from ..fixtures.hub import DatasetRepos -def test_get_hf_canonical_dataset_names(hf_dataset_repos_csv_data: DatasetRepos) -> None: - dataset_names = get_hf_canonical_dataset_names() +def test_get_hf_canonical_dataset_names(app_config: AppConfig, hf_dataset_repos_csv_data: DatasetRepos) -> None: + dataset_names = get_hf_canonical_dataset_names(hf_endpoint=app_config.common.hf_endpoint) diff --git a/services/admin/tests/test_app.py b/services/admin/tests/test_app.py index c3d4f0c8..7f967dd5 100644 --- a/services/admin/tests/test_app.py +++ b/services/admin/tests/test_app.py @@ -12 +11,0 @@ from admin.app import create_app -from admin.config import MONGO_CACHE_DATABASE, MONGO_QUEUE_DATABASE @@ -16,8 +14,0 @@ from admin.utils import JobType [email protected](autouse=True, scope="module") -def safe_guard() -> None: - if "test" not in MONGO_CACHE_DATABASE: - raise ValueError("Tests on cache must be launched on a test mongo database") - if "test" not in MONGO_QUEUE_DATABASE: - raise ValueError("Tests on queue must be launched on a test mongo database") - - @@ -25 +16 @@ def safe_guard() -> None: -def client() -> TestClient: +def client(monkeypatch_session: pytest.MonkeyPatch) -> TestClient: diff --git a/services/admin/tests/test_authentication.py b/services/admin/tests/test_authentication.py index d4cb710d..00b4c46c 100644 --- a/services/admin/tests/test_authentication.py +++ b/services/admin/tests/test_authentication.py @@ -4 +4 @@ -from typing import Dict +from typing import Dict, Optional, Type @@ -23 +23 @@ def test_unreachable_external_auth_check_service() -> None: - auth_check(external_auth_url="https://auth.check") + auth_check(external_auth_url="https://auth.check", organization="org") @@ -27 +27,11 @@ def test_unreachable_external_auth_check_service() -> None: -def test_external_auth_responses_without_request() -> None: [email protected]( + "status,error", + [ + (200, None), + (401, ExternalUnauthenticatedError), + (403, ExternalAuthenticatedError), + (404, ExternalAuthenticatedError), + (429, ValueError), + ], +) +def test_external_auth_responses_without_request(status: int, error: Optional[Type[Exception]]) -> None: @@ -30,18 +40,6 @@ def test_external_auth_responses_without_request() -> None: - responses.add(responses.GET, url, status=200, body=body) - assert auth_check(external_auth_url=url, organization=None) is True - - responses.add(responses.GET, url, status=401, body=body) - with pytest.raises(ExternalUnauthenticatedError): - auth_check(external_auth_url=url, organization=None) - - responses.add(responses.GET, url, status=403, body=body) - with pytest.raises(ExternalAuthenticatedError): - auth_check(external_auth_url=url, organization=None) - - responses.add(responses.GET, url, status=404, body=body) - with pytest.raises(ExternalAuthenticatedError): - auth_check(external_auth_url=url, organization=None) - - responses.add(responses.GET, url, status=429, body=body) - with pytest.raises(ValueError): - auth_check(external_auth_url=url, organization=None) + responses.add(responses.GET, url, status=status, body=body) + if error is None: + assert auth_check(external_auth_url=url, organization="org1") is True + else: + with pytest.raises(error): + auth_check(external_auth_url=url, 
organization="org1") @@ -51 +49,5 @@ def test_external_auth_responses_without_request() -> None: -def test_org() -> None: [email protected]( + "org,status,error", + [("org1", 200, None), ("org2", 403, ExternalAuthenticatedError)], +) +def test_org(org: str, status: int, error: Optional[Type[Exception]]) -> None: @@ -54,6 +56,6 @@ def test_org() -> None: - responses.add(responses.GET, url, status=200, body=body) - assert auth_check(external_auth_url=url, organization="org1") is True - - responses.add(responses.GET, url, status=403, body=body) - with pytest.raises(ExternalAuthenticatedError): - auth_check(external_auth_url=url, organization="org2") + responses.add(responses.GET, url, status=status, body=body) + if error is None: + assert auth_check(external_auth_url=url, organization=org) is True + else: + with pytest.raises(error): + auth_check(external_auth_url=url, organization=org) diff --git a/services/api/Dockerfile b/services/api/Dockerfile index 98fabffe..f7857391 100644 --- a/services/api/Dockerfile +++ b/services/api/Dockerfile @@ -26 +26 @@ COPY libs/libqueue/dist ./libs/libqueue/dist -COPY libs/libutils/dist ./libs/libutils/dist +COPY libs/libcommon/dist ./libs/libcommon/dist diff --git a/services/api/Makefile b/services/api/Makefile index 5eebae2d..97518e03 100644 --- a/services/api/Makefile +++ b/services/api/Makefile @@ -2,5 +2,4 @@ -export TEST_MONGO_PORT := 27031 -export TEST_MONGO_CACHE_DATABASE := datasets_server_cache_test -export TEST_MONGO_QUEUE_DATABASE := datasets_server_queue_test -export TEST_COMPOSE_PROJECT_NAME := api -export TEST_HF_ENDPOINT := https://hub-ci.huggingface.co +export COMPOSE_PROJECT_NAME := api +export MONGO_PORT := 27031 +export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} +export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT} @@ -8 +7 @@ export TEST_HF_ENDPOINT := https://hub-ci.huggingface.co -TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml +DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml diff --git a/services/api/README.md b/services/api/README.md index 9a232f53..26e9e119 100644 --- a/services/api/README.md +++ b/services/api/README.md @@ -7,16 +7,30 @@ -Set environment variables to configure the following aspects: - -- `API_HOSTNAME`: the hostname used by the API endpoint. Defaults to `"localhost"`. -- `API_NUM_WORKERS`: the number of workers of the API endpoint. Defaults to `2`. -- `API_PORT`: the port used by the API endpoint. Defaults to `8000`. -- `ASSETS_DIRECTORY`: directory where the asset files are stored. Defaults to empty, in which case the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. -- `HF_AUTH_PATH`: the path of the external authentication service, on the hub (see `HF_ENDPOINT`). The string must contain `%s` which will be replaced with the dataset name. The external authentication service must return 200, 401, 403 or 404. If empty, the authentication is disabled. Defaults to "/api/datasets/%s/auth-check". -- `HF_ENDPOINT`: URL of the HuggingFace Hub. Defaults to `https://huggingface.co`. -- `HF_TOKEN`: App Access Token (ask moonlanding administrators to get one, only the `read` role is required), to access the gated datasets. Defaults to empty. -- `LOG_LEVEL`: log level, among `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. Defaults to `INFO`. -- `MAX_AGE_LONG_SECONDS`: number of seconds to set in the `max-age` header on data endpoints. Defaults to `120` (2 minutes). 
-- `MAX_AGE_SHORT_SECONDS`: number of seconds to set in the `max-age` header on technical endpoints. Defaults to `10` (10 seconds). -- `MONGO_CACHE_DATABASE`: the name of the database used for storing the cache. Defaults to `"datasets_server_cache"`. -- `MONGO_QUEUE_DATABASE`: the name of the database used for storing the queue. Defaults to `"datasets_server_queue"`. -- `MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. -- `PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker. +The service can be configured using environment variables. They are grouped by scope. + +### API service + +Set environment variables to configure the application (`API_` prefix): + +- `API_HF_AUTH_PATH`: the path of the external authentication service, on the hub (see `HF_ENDPOINT`). The string must contain `%s` which will be replaced with the dataset name. The external authentication service must return 200, 401, 403 or 404. Defaults to "/api/datasets/%s/auth-check". +- `API_MAX_AGE_LONG`: number of seconds to set in the `max-age` header on data endpoints. Defaults to `120` (2 minutes). +- `API_MAX_AGE_SHORT`: number of seconds to set in the `max-age` header on technical endpoints. Defaults to `10` (10 seconds). +- `API_PROMETHEUS_MULTIPROC_DIR`: the directory where the uvicorn workers share their prometheus metrics. See https://github.com/prometheus/client_python#multiprocess-mode-eg-gunicorn. Defaults to empty, in which case every worker manages its own metrics, and the /metrics endpoint returns the metrics of a random worker. + +### Uvicorn + +The following environment variables are used to configure the Uvicorn server (`API_UVICORN_` prefix): + +- `API_UVICORN_HOSTNAME`: the hostname. Defaults to `"localhost"`. +- `API_UVICORN_NUM_WORKERS`: the number of uvicorn workers. Defaults to `2`. +- `API_UVICORN_PORT`: the port. Defaults to `8000`. + +### Cache + +See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. + +### Queue + +See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. + +### Common + +See [../../libs/libcommon/README.md](../../libs/libcommon/README.md) for more information about the common configuration.
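For context, here is a minimal sketch of how the `API_`-prefixed variables documented above could be parsed with `environs`, mirroring the `AdminConfig` pattern this commit introduces for the admin service; the `ApiConfig` class name and its exact attribute set are illustrative assumptions, not the actual service code:

from typing import Optional

from environs import Env


class ApiConfig:
    # Hypothetical sketch: parses the API_* variables listed in services/api/README.md,
    # using the same Env(expand_vars=True) / env.prefixed() pattern as AdminConfig.
    hf_auth_path: str
    max_age_long: int
    max_age_short: int
    prometheus_multiproc_dir: Optional[str]

    def __init__(self):
        env = Env(expand_vars=True)
        with env.prefixed("API_"):
            self.hf_auth_path = env.str(name="HF_AUTH_PATH", default="/api/datasets/%s/auth-check")
            self.max_age_long = env.int(name="MAX_AGE_LONG", default=120)  # 2 minutes
            self.max_age_short = env.int(name="MAX_AGE_SHORT", default=10)  # 10 seconds
            # Empty string means "not set": each uvicorn worker then keeps its own metrics.
            prometheus_multiproc_dir = env.str(name="PROMETHEUS_MULTIPROC_DIR", default="")
            self.prometheus_multiproc_dir = None if prometheus_multiproc_dir == "" else prometheus_multiproc_dir

With this in place, for example, exporting API_MAX_AGE_LONG=60 before starting the service would make ApiConfig().max_age_long evaluate to 60.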
diff --git a/services/api/poetry.lock b/services/api/poetry.lock index 566702a1..504eeda4 100644 --- a/services/api/poetry.lock +++ b/services/api/poetry.lock @@ -3 +3 @@ name = "anyio" -version = "3.6.1" +version = "3.6.2" @@ -16 +16 @@ test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytes -trio = ["trio (>=0.16)"] +trio = ["trio (>=0.16,<0.22)"] @@ -39 +39 @@ name = "atomicwrites" -version = "1.4.0" +version = "1.4.1" @@ -47 +47 @@ name = "attrs" -version = "21.4.0" +version = "22.1.0" @@ -51 +51 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" @@ -54 +54 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] @@ -56,2 +56,2 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] @@ -80 +80 @@ name = "black" -version = "22.3.0" +version = "22.10.0" @@ -84 +84 @@ optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" @@ -91 +91 @@ platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} @@ -102 +102 @@ name = "certifi" -version = "2022.5.18.1" +version = "2022.9.24" @@ -110 +110 @@ name = "charset-normalizer" -version = "2.0.12" +version = "2.1.1" @@ -114 +114 @@ optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" @@ -132 +132 @@ name = "colorama" -version = "0.4.4" +version = "0.4.5" @@ -140 +140 @@ name = "coverage" -version = "6.4.1" +version = "6.5.0" @@ -176,0 +177,18 @@ conda = ["pyyaml"] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + @@ -215 +233 @@ name = "gitpython" -version = "3.1.27" +version = "3.1.29" @@ -226 +244 @@ name = "h11" -version = "0.13.0" +version = "0.14.0" @@ -230 +248 @@ optional = false -python-versions = ">=3.6" +python-versions = 
">=3.7" @@ -259 +277 @@ name = "idna" -version = "3.3" +version = "3.4" @@ -305 +323 @@ name = "libcache" -version = "0.2.1" +version = "0.3.0" @@ -312,0 +331 @@ appdirs = ">=1.4.4,<2.0.0" +environs = ">=9.5.0,<10.0.0" @@ -319 +338 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.2.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" @@ -322 +341 @@ url = "../../libs/libcache/dist/libcache-0.2.1-py3-none-any.whl" -name = "libqueue" +name = "libcommon" @@ -324 +343 @@ version = "0.3.1" -description = "Library for the jobs queue in mongodb" +description = "Library for utils, common to all the services and workers" @@ -330,3 +349,2 @@ python-versions = "==3.9.6" -mongo-types = "0.15.1" -mongoengine = ">=0.24.1,<0.25.0" -pymongo = {version = ">=3.12.3,<4.0.0", extras = ["srv"]} +environs = ">=9.5.0,<10.0.0" +orjson = ">=3.6.4,<4.0.0" @@ -336 +354 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl" +url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" @@ -339,3 +357,3 @@ url = "../../libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl" -name = "libutils" -version = "0.2.0" -description = "Library for utils" +name = "libqueue" +version = "0.4.1" +description = "Library for the jobs queue in mongodb" @@ -347,2 +365,5 @@ python-versions = "==3.9.6" -orjson = ">=3.6.4,<4.0.0" -starlette = ">=0.16.0,<0.17.0" +environs = ">=9.5.0,<10.0.0" +mongo-types = "0.15.1" +mongoengine = ">=0.24.1,<0.25.0" +psutil = ">=5.9.2,<6.0.0" +pymongo = {version = ">=3.12.3,<4.0.0", extras = ["srv"]} @@ -352 +373 @@ type = "file" -url = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" @@ -361,0 +383,17 @@ python-versions = ">=3.7" +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + @@ -380 +418 @@ name = "mongoengine" -version = "0.24.1" +version = "0.24.2" @@ -415 +453 @@ name = "orjson" -version = "3.7.2" +version = "3.8.0" @@ -434 +472 @@ name = "pathspec" -version = "0.9.0" +version = "0.10.1" @@ -438 +476 @@ optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" @@ -442 +480 @@ name = "pbr" -version = "5.9.0" +version = "5.10.0" @@ -493,0 +532,11 @@ twisted = ["twisted"] +[[package]] +name = "psutil" +version = "5.9.3" +description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] + @@ -605,0 +655,11 @@ Werkzeug = ">=2.0.0" +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + @@ -616 +676 @@ name = "requests" -version = "2.28.0" +version = "2.28.1" @@ -624 +684 @@ certifi = ">=2017.4.17" -charset-normalizer = ">=2.0.0,<2.1.0" +charset-normalizer = ">=2,<3" @@ -630 +690 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] @@ -684 +744 @@ name = "sniffio" -version = "1.2.0" +version = "1.3.0" @@ -688 +748 @@ optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" @@ -718 +778 @@ name = "stevedore" -version = "3.5.0" +version = "4.0.1" @@ -722 +782 @@ optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" @@ -778 +838 @@ name = "typing-extensions" -version = "4.2.0" +version = "4.4.0" @@ -786 +846 @@ name = "urllib3" -version = "1.26.9" +version = "1.26.12" @@ -790 +850 @@ optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" @@ -794 +854 @@ brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] @@ -844 +904 @@ python-versions = "3.9.6" -content-hash = "a0dadf28310314e4d24d1993ebe7d65102b08dec8b768a4a068b2bc1bced9b51" +content-hash = "0f241acb38c4931b5f2e9ae27adfff99e98536f4fc5c005b0afe673fbf95f495" @@ -847,4 +907 @@ content-hash = "a0dadf28310314e4d24d1993ebe7d65102b08dec8b768a4a068b2bc1bced9b51 -anyio = [ - {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, - {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, -] +anyio = [] @@ -859,8 +916,2 @@ asgiref = [ -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] +atomicwrites = [] +attrs = [] @@ -871,33 +922,3 @@ bandit = [ -black = [ - {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, - {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, - {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, - {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, - {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, - {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, - {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, - {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, - {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, - {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, - {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, - {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, - {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, - {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, - {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, - {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, - {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, - {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, - {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, -] -certifi = [ - {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, - {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] +black = [] +certifi = [] +charset-normalizer = [] @@ -909,45 +930,2 @@ colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = 
"sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -coverage = [ - {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, - {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, - {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, - {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, - {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, - {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, - {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, - {file = 
"coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, - {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, - {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, - {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, - {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, - {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, - {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, @@ -954,0 +933 @@ coverage = [ +coverage = [] @@ -959,0 +939 @@ dparse = [] +environs = [] @@ -969,8 +949,2 @@ gitdb = [ -gitpython = [ - {file = "GitPython-3.1.27-py3-none-any.whl", hash = 
"sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, - {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, -] -h11 = [ - {file = "h11-0.13.0-py3-none-any.whl", hash = "sha256:8ddd78563b633ca55346c8cd41ec0af27d3c79931828beffb46ce70a379e7442"}, - {file = "h11-0.13.0.tar.gz", hash = "sha256:70813c1135087a248a4d38cc0e1a0181ffab2188141a93eaf567940c3957ff06"}, -] +gitpython = [] +h11 = [] @@ -978,4 +952 @@ huggingface-hub = [] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] +idna = [] @@ -992 +963 @@ libcache = [ - {file = "libcache-0.2.1-py3-none-any.whl", hash = "sha256:62c57b8e12a70241106cd9bcc7b845b40ba5ff9dd6423691de269a42f507943f"}, + {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, @@ -994,2 +965,2 @@ libcache = [ -libqueue = [ - {file = "libqueue-0.3.1-py3-none-any.whl", hash = "sha256:bc8be8f0ffe1d82260269ce21f9814d3b083ac175f0eb6200ca84a791e427a5d"}, +libcommon = [ + {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, @@ -997,2 +968,2 @@ libqueue = [ -libutils = [ - {file = "libutils-0.2.0-py3-none-any.whl", hash = "sha256:a562dd39d4b3c5ab20bb11354e8eaf582d873f0367996df9a4c3c00609f608da"}, +libqueue = [ + {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, @@ -1000,0 +972 @@ markupsafe = [] +marshmallow = [] @@ -1009,4 +981 @@ mongo-types = [ -mongoengine = [ - {file = "mongoengine-0.24.1-py3-none-any.whl", hash = "sha256:68878b65bcb3751debcba4342180a180161cdb5f46525027e622ad081dd44fac"}, - {file = "mongoengine-0.24.1.tar.gz", hash = "sha256:01baac85f408f5eefb6195c0afeae631e7fc6fab5cb221a7b46646f94227d6da"}, -] +mongoengine = [] @@ -1041,35 +1010 @@ mypy-extensions = [ -orjson = [ - {file = "orjson-3.7.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:4c6bdb0a7dfe53cca965a40371c7b8e72a0441c8bc4949c9015600f1c7fae408"}, - {file = "orjson-3.7.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6e6fc60775bb0a050846710c4a110e8ad17f41e443ff9d0d05145d8f3a74b577"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4b70bb1f746a9c9afb1f861a0496920b5833ff06f9d1b25b6a7d292cb7e8a06"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99bb2127ee174dd6e68255db26dbef0bd6c4330377a17867ecfa314d47bfac82"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:26306d988401cc34ac94dd38873b8c0384276a5ad80cdf50e266e06083284975"}, - {file = "orjson-3.7.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:34a67d810dbcec77d00d764ab730c5bbb0bee1d75a037c8d8e981506e8fba560"}, - {file = "orjson-3.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14bc727f41ce0dd93d1a6a9fc06076e2401e71b00d0bf107bf64d88d2d963b77"}, - {file = "orjson-3.7.2-cp310-none-win_amd64.whl", hash = "sha256:4c686cbb73ccce02929dd799427897f0a0b2dd597d2f5b6b434917ecc3774146"}, - {file = "orjson-3.7.2-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:12eb683ddbdddd6847ca2b3b074f42574afc0fbf1aff33d8fdf3a4329167762a"}, - {file = 
"orjson-3.7.2-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:993550e6e451a2b71435142d4824a09f8db80d497abae23dc9f3fe62b6ca24c0"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54cfa4d915a98209366dcf500ee5c3f66408cc9e2b4fd777c8508f69a8f519a1"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f735999d49e2fff2c9812f1ea330b368349f77726894e2a06d17371e61d771bb"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:b2b660790b0804624c569ddb8ca9d31bac6f94f880fd54b8cdff4198735a9fec"}, - {file = "orjson-3.7.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:590bc5f33e54eb2261de65e4026876e57d04437bab8dcade9514557e31d84537"}, - {file = "orjson-3.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8ac61c5c98cbcdcf7a3d0a4b62c873bbd9a996a69eaa44f8356a9e10aa29ef49"}, - {file = "orjson-3.7.2-cp37-none-win_amd64.whl", hash = "sha256:662bda15edf4d25d520945660873e730e3a6d9975041ba9c32f0ce93b632ee0d"}, - {file = "orjson-3.7.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:19eb800811a53efc7111ff7536079fb2f62da7098df0a42756ba91e7bdd01aff"}, - {file = "orjson-3.7.2-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:54a1e4e39c89d37d3dbc74dde36d09eebcde365ec6803431af9c86604bbbaf3a"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbd3b46ac514cbe29ecebcee3882383022acf84aa4d3338f26d068c6fbdf56a0"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891640d332c8c7a1478ea6d13b676d239dc86451afa46000c4e8d0990a0d72dd"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:9778a7ec4c72d6814f1e116591f351404a4df2e1dc52d282ff678781f45b509b"}, - {file = "orjson-3.7.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b0b2483f8ad1f93ae4aa43bcf6a985e6ec278e931d0118bae605ffd811b614a1"}, - {file = "orjson-3.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d90ca4e74750c7adfb7708deb096f835f7e6c4b892bdf703fe871565bb04ad7"}, - {file = "orjson-3.7.2-cp38-none-win_amd64.whl", hash = "sha256:b0f4e92bdfe86a0da57028e669bc1f50f48d810ef6f661e63dc6593c450314bf"}, - {file = "orjson-3.7.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:b705132b2827d33291684067cca6baa451a499b459e46761d30fcf4d6ce21a9a"}, - {file = "orjson-3.7.2-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:c589d00b4fb0777f222b35925e4fa030c4777f16d1623669f44bdc191570be66"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e197e6779b230e74333e06db804ff876b27306470f68692ec70c27310e7366f"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a82089ec9e1f7e9b992ff5ab98b4c3c2f98e7bbfdc6fadbef046c5aaafec2b54"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3ff49c219b30d715c8baae17c7c5839fe3f2c2db10a66c61d6b91bda80bf8789"}, - {file = "orjson-3.7.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:299a743576aaa04f5c7994010608f96df5d4a924d584a686c6e263cee732cb00"}, - {file = "orjson-3.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3ae3ed52c875ce1a6c607f852ca177057445289895483b0247f0dc57b481241"}, - {file = "orjson-3.7.2-cp39-none-win_amd64.whl", hash = "sha256:796914f7463277d371402775536fb461948c0d34a67d20a57dc4ec49a48a8613"}, - {file = 
"orjson-3.7.2.tar.gz", hash = "sha256:1cf9690a0b7c51a988221376741a31087bc1dc2ac327bb2dde919806dfa59444"}, -] +orjson = [] @@ -1080,8 +1015,2 @@ packaging = [ -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -pbr = [ - {file = "pbr-5.9.0-py2.py3-none-any.whl", hash = "sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a"}, - {file = "pbr-5.9.0.tar.gz", hash = "sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"}, -] +pathspec = [] +pbr = [] @@ -1103,0 +1033 @@ prometheus-client = [ +psutil = [] @@ -1260,0 +1191 @@ pytest-httpserver = [] +python-dotenv = [] @@ -1297,2 +1228,2 @@ requests = [ - {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"}, - {file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"}, + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, @@ -1307,4 +1238 @@ smmap = [ -sniffio = [ - {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, - {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, -] +sniffio = [] @@ -1319,4 +1247 @@ starlette-prometheus = [ -stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, -] +stevedore = [] @@ -1368,8 +1293,2 @@ typed-ast = [ -typing-extensions = [ - {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, - {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, -] -urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, -] +typing-extensions = [] +urllib3 = [] diff --git a/services/api/pyproject.toml b/services/api/pyproject.toml index 4b4bfd4e..dc0a7a3d 100644 --- a/services/api/pyproject.toml +++ b/services/api/pyproject.toml @@ -11,3 +11,3 @@ jsonschema = "^4.16.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.2.1-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.3.1-py3-none-any.whl", develop = false } -libutils = { path = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } @@ -18,0 +19 @@ watchdog = { extras = ["watchmedo"], version = "^2.1.3" } +environs = "^9.5.0" diff --git 
a/services/api/src/api/app.py b/services/api/src/api/app.py index cc75371a..67bff8ef 100644 --- a/services/api/src/api/app.py +++ b/services/api/src/api/app.py @@ -8,0 +9 @@ from libcache.simple_cache import connect_to_cache +from libcommon.logger import init_logger @@ -10 +10,0 @@ from libqueue.queue import connect_to_queue -from libutils.logger import init_logger @@ -19,13 +19 @@ from starlette_prometheus import PrometheusMiddleware -from api.config import ( - APP_HOSTNAME, - APP_NUM_WORKERS, - APP_PORT, - ASSETS_DIRECTORY, - EXTERNAL_AUTH_URL, - HF_ENDPOINT, - HF_TOKEN, - LOG_LEVEL, - MONGO_CACHE_DATABASE, - MONGO_QUEUE_DATABASE, - MONGO_URL, -) +from api.config import AppConfig, UvicornConfig @@ -41,5 +29,6 @@ def create_app() -> Starlette: - init_logger(log_level=LOG_LEVEL) - connect_to_cache(database=MONGO_CACHE_DATABASE, host=MONGO_URL) - connect_to_queue(database=MONGO_QUEUE_DATABASE, host=MONGO_URL) - show_assets_dir(ASSETS_DIRECTORY) - prometheus = Prometheus() + app_config = AppConfig() + init_logger(app_config.common.log_level) + connect_to_cache(database=app_config.cache.mongo_database, host=app_config.cache.mongo_url) + connect_to_queue(database=app_config.queue.mongo_database, host=app_config.queue.mongo_url) + show_assets_dir(assets_directory=app_config.cache.assets_directory) + prometheus = Prometheus(prometheus_multiproc_dir=app_config.api.prometheus_multiproc_dir) @@ -56 +45,6 @@ def create_app() -> Starlette: - Route("/is-valid", endpoint=create_is_valid_endpoint(EXTERNAL_AUTH_URL)), + Route( + "/is-valid", + endpoint=create_is_valid_endpoint( + external_auth_url=app_config.api.external_auth_url, + ), + ), @@ -61 +55,3 @@ def create_app() -> Starlette: - external_auth_url=EXTERNAL_AUTH_URL, hf_endpoint=HF_ENDPOINT, hf_token=HF_TOKEN + external_auth_url=app_config.api.external_auth_url, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, @@ -67 +63,3 @@ def create_app() -> Starlette: - external_auth_url=EXTERNAL_AUTH_URL, hf_endpoint=HF_ENDPOINT, hf_token=HF_TOKEN + external_auth_url=app_config.api.external_auth_url, + hf_endpoint=app_config.common.hf_endpoint, + hf_token=app_config.common.hf_token, @@ -74 +72,5 @@ def create_app() -> Starlette: - "/webhook", endpoint=create_webhook_endpoint(hf_endpoint=HF_ENDPOINT, hf_token=HF_TOKEN), methods=["POST"] + "/webhook", + endpoint=create_webhook_endpoint( + hf_endpoint=app_config.common.hf_endpoint, hf_token=app_config.common.hf_token + ), + methods=["POST"], @@ -84 +86,7 @@ def create_app() -> Starlette: - Mount("/assets", app=StaticFiles(directory=init_assets_dir(ASSETS_DIRECTORY), check_dir=True), name="assets"), + Mount( + "/assets", + app=StaticFiles( + directory=init_assets_dir(assets_directory=app_config.cache.assets_directory), check_dir=True + ), + name="assets", + ), @@ -91 +99,8 @@ def start() -> None: - uvicorn.run("app:create_app", host=APP_HOSTNAME, port=APP_PORT, factory=True, workers=APP_NUM_WORKERS) + uvicorn_config = UvicornConfig() + uvicorn.run( + "app:create_app", + host=uvicorn_config.hostname, + port=uvicorn_config.port, + factory=True, + workers=uvicorn_config.num_workers, + ) diff --git a/services/api/src/api/config.py b/services/api/src/api/config.py index f1bb71d5..a18ce0bc 100644 --- a/services/api/src/api/config.py +++ b/services/api/src/api/config.py @@ -4,35 +4,50 @@ -import os - -from libutils.utils import get_int_value, get_str_or_none_value, get_str_value - -from api.constants import ( - DEFAULT_APP_HOSTNAME, - DEFAULT_APP_NUM_WORKERS, - DEFAULT_APP_PORT, - 
DEFAULT_ASSETS_DIRECTORY, - DEFAULT_HF_AUTH_PATH, - DEFAULT_HF_ENDPOINT, - DEFAULT_HF_TOKEN, - DEFAULT_LOG_LEVEL, - DEFAULT_MAX_AGE_LONG_SECONDS, - DEFAULT_MAX_AGE_SHORT_SECONDS, - DEFAULT_MONGO_CACHE_DATABASE, - DEFAULT_MONGO_QUEUE_DATABASE, - DEFAULT_MONGO_URL, -) - -APP_HOSTNAME = get_str_value(d=os.environ, key="APP_HOSTNAME", default=DEFAULT_APP_HOSTNAME) -APP_NUM_WORKERS = get_int_value(d=os.environ, key="APP_NUM_WORKERS", default=DEFAULT_APP_NUM_WORKERS) -APP_PORT = get_int_value(d=os.environ, key="APP_PORT", default=DEFAULT_APP_PORT) -ASSETS_DIRECTORY = get_str_or_none_value(d=os.environ, key="ASSETS_DIRECTORY", default=DEFAULT_ASSETS_DIRECTORY) -HF_AUTH_PATH = get_str_or_none_value(d=os.environ, key="HF_AUTH_PATH", default=DEFAULT_HF_AUTH_PATH) -HF_ENDPOINT = get_str_value(d=os.environ, key="HF_ENDPOINT", default=DEFAULT_HF_ENDPOINT) -HF_TOKEN = get_str_or_none_value(d=os.environ, key="HF_TOKEN", default=DEFAULT_HF_TOKEN) -LOG_LEVEL = get_str_value(d=os.environ, key="LOG_LEVEL", default=DEFAULT_LOG_LEVEL) -MAX_AGE_LONG_SECONDS = get_int_value(d=os.environ, key="MAX_AGE_LONG_SECONDS", default=DEFAULT_MAX_AGE_LONG_SECONDS) -MAX_AGE_SHORT_SECONDS = get_int_value(d=os.environ, key="MAX_AGE_SHORT_SECONDS", default=DEFAULT_MAX_AGE_SHORT_SECONDS) -MONGO_CACHE_DATABASE = get_str_value(d=os.environ, key="MONGO_CACHE_DATABASE", default=DEFAULT_MONGO_CACHE_DATABASE) -MONGO_QUEUE_DATABASE = get_str_value(d=os.environ, key="MONGO_QUEUE_DATABASE", default=DEFAULT_MONGO_QUEUE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) - -EXTERNAL_AUTH_URL = None if HF_AUTH_PATH is None else f"{HF_ENDPOINT}{HF_AUTH_PATH}" +from typing import Optional + +from environs import Env +from libcache.config import CacheConfig +from libcommon.config import CommonConfig +from libqueue.config import QueueConfig + + +class UvicornConfig: + hostname: str + num_workers: int + port: int + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("API_UVICORN_"): + self.hostname = env.str(name="HOSTNAME", default="localhost") + self.num_workers = env.int(name="NUM_WORKERS", default=2) + self.port = env.int(name="PORT", default=8000) + + +class ApiConfig: + external_auth_url: Optional[str] + hf_auth_path: str + max_age_long: int + max_age_short: int + prometheus_multiproc_dir: Optional[str] + + def __init__(self, hf_endpoint: str): + env = Env(expand_vars=True) + with env.prefixed("API_"): + self.hf_auth_path = env.str(name="HF_AUTH_PATH", default="/api/datasets/%s/auth-check") + self.max_age_long = env.int(name="MAX_AGE_LONG", default=120) # 2 minutes + self.max_age_short = env.int(name="MAX_AGE_SHORT", default=10) # 10 seconds + prometheus_multiproc_dir = env.str(name="PROMETHEUS_MULTIPROC_DIR", default="") + self.prometheus_multiproc_dir = None if prometheus_multiproc_dir == "" else prometheus_multiproc_dir + self.external_auth_url = None if self.hf_auth_path is None else f"{hf_endpoint}{self.hf_auth_path}" + + +class AppConfig: + api: ApiConfig + cache: CacheConfig + common: CommonConfig + queue: QueueConfig + + def __init__(self): + self.cache = CacheConfig() + self.common = CommonConfig() + self.queue = QueueConfig() + self.api = ApiConfig(hf_endpoint=self.common.hf_endpoint) diff --git a/services/api/src/api/constants.py b/services/api/src/api/constants.py deleted file mode 100644 index c1f298de..00000000 --- a/services/api/src/api/constants.py +++ /dev/null @@ -1,19 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. 
- -from typing import Optional - -DEFAULT_APP_HOSTNAME: str = "localhost" -DEFAULT_APP_NUM_WORKERS: int = 2 -DEFAULT_APP_PORT: int = 8000 -DEFAULT_ASSETS_DIRECTORY: None = None -DEFAULT_DATASETS_ENABLE_PRIVATE: bool = False -DEFAULT_HF_AUTH_PATH: str = "/api/datasets/%s/auth-check" -DEFAULT_HF_ENDPOINT: str = "https://huggingface.co" -DEFAULT_HF_TOKEN: Optional[str] = None -DEFAULT_LOG_LEVEL: str = "INFO" -DEFAULT_MAX_AGE_LONG_SECONDS: int = 120 # 2 minutes -DEFAULT_MAX_AGE_SHORT_SECONDS: int = 10 # 10 seconds -DEFAULT_MONGO_CACHE_DATABASE: str = "datasets_server_cache" -DEFAULT_MONGO_QUEUE_DATABASE: str = "datasets_server_queue" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27018" diff --git a/services/api/src/api/prometheus.py b/services/api/src/api/prometheus.py index 2c2b4ee4..44b9d16a 100644 --- a/services/api/src/api/prometheus.py +++ b/services/api/src/api/prometheus.py @@ -4 +4 @@ -import os +from typing import Optional @@ -19,0 +20,5 @@ class Prometheus: + prometheus_multiproc_dir: Optional[str] + + def __init__(self, prometheus_multiproc_dir: Optional[str]): + self.prometheus_multiproc_dir = prometheus_multiproc_dir + @@ -22 +27 @@ class Prometheus: - if "PROMETHEUS_MULTIPROC_DIR" in os.environ: + if self.prometheus_multiproc_dir is not None: @@ -24 +29 @@ class Prometheus: - MultiProcessCollector(registry) + MultiProcessCollector(registry=registry, path=self.prometheus_multiproc_dir) diff --git a/services/api/src/api/routes/first_rows.py b/services/api/src/api/routes/first_rows.py index 20d476f8..353d5112 100644 --- a/services/api/src/api/routes/first_rows.py +++ b/services/api/src/api/routes/first_rows.py @@ -33,0 +34,2 @@ def create_first_rows_endpoint( + max_age_long: int = 0, + max_age_short: int = 0, @@ -49 +51 @@ def create_first_rows_endpoint( - return get_json_ok_response(response) + return get_json_ok_response(content=response, max_age=max_age_long) @@ -51 +53,3 @@ def create_first_rows_endpoint( - return get_json_error_response(response, http_status, error_code) + return get_json_error_response( + content=response, status_code=http_status, max_age=max_age_short, error_code=error_code + ) @@ -62 +66 @@ def create_first_rows_endpoint( - return get_json_api_error_response(e) + return get_json_api_error_response(error=e, max_age=max_age_short) @@ -64 +68 @@ def create_first_rows_endpoint( - return get_json_api_error_response(UnexpectedError("Unexpected error.", e)) + return get_json_api_error_response(error=UnexpectedError("Unexpected error.", e), max_age=max_age_short) diff --git a/services/api/src/api/routes/splits.py b/services/api/src/api/routes/splits.py index 7e86229f..eb9e3a21 100644 --- a/services/api/src/api/routes/splits.py +++ b/services/api/src/api/routes/splits.py @@ -31 +31,5 @@ def create_splits_endpoint( - hf_endpoint: str, hf_token: Optional[str] = None, external_auth_url: Optional[str] = None + hf_endpoint: str, + hf_token: Optional[str] = None, + external_auth_url: Optional[str] = None, + max_age_long: int = 0, + max_age_short: int = 0, @@ -45 +49 @@ def create_splits_endpoint( - return get_json_ok_response(response) + return get_json_ok_response(content=response, max_age=max_age_long) @@ -47 +51,3 @@ def create_splits_endpoint( - return get_json_error_response(response, http_status, error_code) + return get_json_error_response( + content=response, status_code=http_status, max_age=max_age_short, error_code=error_code + ) @@ -56 +62 @@ def create_splits_endpoint( - return get_json_api_error_response(e) + return get_json_api_error_response(error=e, 
max_age=max_age_short) @@ -58 +64 @@ def create_splits_endpoint( - return get_json_api_error_response(UnexpectedError("Unexpected error.", err)) + return get_json_api_error_response(error=UnexpectedError("Unexpected error.", err), max_age=max_age_short) diff --git a/services/api/src/api/routes/valid.py b/services/api/src/api/routes/valid.py index d64e0a41..d892d595 100644 --- a/services/api/src/api/routes/valid.py +++ b/services/api/src/api/routes/valid.py @@ -34 +34,3 @@ async def valid_endpoint(_: Request) -> Response: -def create_is_valid_endpoint(external_auth_url: Optional[str] = None) -> Endpoint: +def create_is_valid_endpoint( + external_auth_url: Optional[str] = None, max_age_long: int = 0, max_age_short: int = 0 +) -> Endpoint: @@ -46 +48 @@ def create_is_valid_endpoint(external_auth_url: Optional[str] = None) -> Endpoin - return get_json_ok_response(content) + return get_json_ok_response(content=content, max_age=max_age_long) @@ -48 +50 @@ def create_is_valid_endpoint(external_auth_url: Optional[str] = None) -> Endpoin - return get_json_api_error_response(e) + return get_json_api_error_response(error=e, max_age=max_age_short) @@ -50 +52 @@ def create_is_valid_endpoint(external_auth_url: Optional[str] = None) -> Endpoin - return get_json_api_error_response(UnexpectedError("Unexpected error.")) + return get_json_api_error_response(error=UnexpectedError("Unexpected error."), max_age=max_age_short) diff --git a/services/api/src/api/utils.py b/services/api/src/api/utils.py index bc06a1aa..c8cab845 100644 --- a/services/api/src/api/utils.py +++ b/services/api/src/api/utils.py @@ -8,2 +8,2 @@ from typing import Any, Callable, Coroutine, List, Literal, Optional -from libutils.exceptions import CustomError -from libutils.utils import orjson_dumps +from libcommon.exceptions import CustomError +from libcommon.utils import orjson_dumps @@ -13,2 +12,0 @@ from starlette.responses import JSONResponse, Response -from api.config import MAX_AGE_LONG_SECONDS, MAX_AGE_SHORT_SECONDS - @@ -102 +100 @@ class OrjsonResponse(JSONResponse): - return orjson_dumps(content) + return orjson_dumps(content=content) @@ -107 +105 @@ def get_response(content: Any, status_code: int = 200, max_age: int = 0) -> Resp - return OrjsonResponse(content, status_code=status_code, headers=headers) + return OrjsonResponse(content=content, status_code=status_code, headers=headers) @@ -116 +114 @@ def get_json_response( - return OrjsonResponse(content, status_code=status_code.value, headers=headers) + return OrjsonResponse(content=content, status_code=status_code.value, headers=headers) @@ -119,2 +117,2 @@ def get_json_response( -def get_json_ok_response(content: Any) -> Response: - return get_json_response(content, max_age=MAX_AGE_LONG_SECONDS) +def get_json_ok_response(content: Any, max_age: int = 0) -> Response: + return get_json_response(content=content, max_age=max_age) @@ -124 +122 @@ def get_json_error_response( - content: Any, status_code: HTTPStatus = HTTPStatus.OK, error_code: Optional[str] = None + content: Any, status_code: HTTPStatus = HTTPStatus.OK, max_age: int = 0, error_code: Optional[str] = None @@ -126 +124 @@ def get_json_error_response( - return get_json_response(content, status_code=status_code, max_age=MAX_AGE_SHORT_SECONDS, error_code=error_code) + return get_json_response(content=content, status_code=status_code, max_age=max_age, error_code=error_code) @@ -129,2 +127,4 @@ def get_json_error_response( -def get_json_api_error_response(error: ApiCustomError) -> Response: - return 
get_json_error_response(error.as_response(), error.status_code, error.code) +def get_json_api_error_response(error: ApiCustomError, max_age: int = 0) -> Response: + return get_json_error_response( + content=error.as_response(), status_code=error.status_code, max_age=max_age, error_code=error.code + ) diff --git a/services/api/tests/conftest.py b/services/api/tests/conftest.py index 50f0e847..4ad69dcb 100644 --- a/services/api/tests/conftest.py +++ b/services/api/tests/conftest.py @@ -4 +4 @@ -import os +from pytest import MonkeyPatch, fixture @@ -6 +6 @@ import os -import pytest +from api.config import AppConfig, UvicornConfig @@ -8,4 +7,0 @@ import pytest -port = 8888 -host = "localhost" -HF_ENDPOINT = f"http://{host}:{port}" -HF_AUTH_PATH = "/api/datasets/%s/auth-check" @@ -13,2 +9,13 @@ HF_AUTH_PATH = "/api/datasets/%s/auth-check" -os.environ["HF_ENDPOINT"] = HF_ENDPOINT -os.environ["HF_AUTH_PATH"] = HF_AUTH_PATH +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + hostname = "localhost" + port = "8888" + monkeypatch_session.setenv("API_UVICORN_HOSTNAME", hostname) + monkeypatch_session.setenv("API_UVICORN_PORT", port) + monkeypatch_session.setenv("COMMON_HF_ENDPOINT", f"http://{hostname}:{port}") + yield monkeypatch_session + monkeypatch_session.undo() @@ -17,3 +24,6 @@ os.environ["HF_AUTH_PATH"] = HF_AUTH_PATH [email protected](scope="session") -def httpserver_listen_address(): - return (host, 8888) +@fixture(scope="session") +def app_config(monkeypatch_session: MonkeyPatch) -> AppConfig: + app_config = AppConfig() + if "test" not in app_config.cache.mongo_database or "test" not in app_config.queue.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return app_config @@ -22,3 +32,3 @@ def httpserver_listen_address(): [email protected](scope="session") -def hf_endpoint(): - return HF_ENDPOINT +@fixture(scope="session") +def uvicorn_config(monkeypatch_session: MonkeyPatch): + return UvicornConfig() @@ -27,3 +37,13 @@ def hf_endpoint(): [email protected](scope="session") -def hf_auth_path(): - return HF_AUTH_PATH +@fixture(scope="session") +def httpserver_listen_address(uvicorn_config: UvicornConfig): + return (uvicorn_config.hostname, uvicorn_config.port) + + +@fixture(scope="session") +def hf_endpoint(app_config: AppConfig): + return app_config.common.hf_endpoint + + +@fixture(scope="session") +def hf_auth_path(app_config: AppConfig): + return app_config.api.hf_auth_path diff --git a/services/api/tests/test_app.py b/services/api/tests/test_app.py index 4148db95..845e9e70 100644 --- a/services/api/tests/test_app.py +++ b/services/api/tests/test_app.py @@ -16 +15,0 @@ from api.app import create_app -from api.config import EXTERNAL_AUTH_URL, MONGO_CACHE_DATABASE, MONGO_QUEUE_DATABASE @@ -21,10 +19,0 @@ from .utils import auth_callback -external_auth_url = EXTERNAL_AUTH_URL or "%s" # for mypy - - [email protected](autouse=True, scope="module") -def safe_guard() -> None: - if "test" not in MONGO_CACHE_DATABASE: - raise ValueError("Tests on cache must be launched on a test mongo database") - if "test" not in MONGO_QUEUE_DATABASE: - raise ValueError("Tests on queue must be launched on a test mongo database") - @@ -33 +22 @@ def safe_guard() -> None: -def client() -> TestClient: 
+def client(monkeypatch_session: pytest.MonkeyPatch) -> TestClient: diff --git a/services/reverse-proxy/README.md b/services/reverse-proxy/README.md index d8b8e40b..e6de8256 100644 --- a/services/reverse-proxy/README.md +++ b/services/reverse-proxy/README.md @@ -17 +17 @@ It takes various environment variables, all of them are mandatory: -- `ASSETS_DIRECTORY`: the directory that contains the static assets, eg `/assets` +- `COMMON_ASSETS_DIRECTORY`: the directory that contains the static assets, eg `/assets` @@ -25 +25 @@ The image requires three directories to be mounted (from volumes): -- `$ASSETS_DIRECTORY` (read-only): the directory that contains the static assets. +- `$COMMON_ASSETS_DIRECTORY` (read-only): the directory that contains the static assets. diff --git a/tools/DockerRemoteImages.mk b/tools/DockerRemoteImages.mk index a01a046f..c83de45d 100644 --- a/tools/DockerRemoteImages.mk +++ b/tools/DockerRemoteImages.mk @@ -1,2 +0,0 @@ -export IMAGE_ADMIN := $(shell jq -r '.dockerImage.admin' ${DOCKER_IMAGES}) -export IMAGE_API := $(shell jq -r '.dockerImage.api' ${DOCKER_IMAGES}) @@ -4,2 +2,4 @@ export IMAGE_REVERSE_PROXY := $(shell jq -r '.dockerImage.reverseProxy' ${DOCKER -export IMAGE_WORKER_SPLITS := $(shell jq -r '.dockerImage.worker.splits' ${DOCKER_IMAGES}) -export IMAGE_WORKER_FIRST_ROWS := $(shell jq -r '.dockerImage.worker.firstRows' ${DOCKER_IMAGES}) +export IMAGE_ADMIN := $(shell jq -r '.dockerImage.services.admin' ${DOCKER_IMAGES}) +export IMAGE_API := $(shell jq -r '.dockerImage.services.api' ${DOCKER_IMAGES}) +export IMAGE_WORKER_SPLITS := $(shell jq -r '.dockerImage.workers.splits' ${DOCKER_IMAGES}) +export IMAGE_WORKER_FIRST_ROWS := $(shell jq -r '.dockerImage.workers.firstRows' ${DOCKER_IMAGES}) diff --git a/tools/Python.mk b/tools/Python.mk index 1fa36697..32215a43 100644 --- a/tools/Python.mk +++ b/tools/Python.mk @@ -2,2 +1,0 @@ -export TEST_MONGO_URL := mongodb://localhost:${TEST_MONGO_PORT} - @@ -35,9 +32,0 @@ style: - -.PHONY: test-target -test-target: - MONGO_URL=${TEST_MONGO_URL} MONGO_QUEUE_DATABASE=${TEST_MONGO_QUEUE_DATABASE} MONGO_CACHE_DATABASE=${TEST_MONGO_CACHE_DATABASE} ROWS_MAX_NUMBER=${TEST_ROWS_MAX_NUMBER} HF_ENDPOINT=${TEST_HF_ENDPOINT} HF_TOKEN=${TEST_HF_TOKEN} poetry run python -m pytest -vv -x $(TEST_TARGET) $(PYTEST_ARGS) - -.PHONY: test-target-expression -test-target-expression: - MONGO_URL=${TEST_MONGO_URL} MONGO_QUEUE_DATABASE=${TEST_MONGO_QUEUE_DATABASE} MONGO_CACHE_DATABASE=${TEST_MONGO_CACHE_DATABASE} ROWS_MAX_NUMBER=${TEST_ROWS_MAX_NUMBER} HF_ENDPOINT=${TEST_HF_ENDPOINT} HF_TOKEN=${TEST_HF_TOKEN} poetry run python -m pytest -vv -x $(TEST_TARGET) -k $(TEST_EXPRESSION) $(PYTEST_ARGS) - @@ -46,4 +35,4 @@ test: - COMPOSE_PROJECT_NAME=${TEST_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${TEST_DOCKER_COMPOSE} HF_ENDPOINT=${TEST_HF_ENDPOINT} HF_TOKEN=${TEST_HF_TOKEN} $(MAKE) down - MONGO_PORT=${TEST_MONGO_PORT} COMPOSE_PROJECT_NAME=${TEST_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${TEST_DOCKER_COMPOSE} HF_ENDPOINT=${TEST_HF_ENDPOINT} HF_TOKEN=${TEST_HF_TOKEN} ROWS_MAX_NUMBER=${TEST_ROWS_MAX_NUMBER} $(MAKE) up - TEST_TARGET=tests make test-target - COMPOSE_PROJECT_NAME=${TEST_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${TEST_DOCKER_COMPOSE} HF_ENDPOINT=${TEST_HF_ENDPOINT} HF_TOKEN=${TEST_HF_TOKEN} $(MAKE) down + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -vv -x tests + $(MAKE) down @@ -53,4 +42,4 @@ coverage: - COMPOSE_PROJECT_NAME=${TEST_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${TEST_DOCKER_COMPOSE} HF_ENDPOINT=${TEST_HF_ENDPOINT} 
HF_TOKEN=${TEST_HF_TOKEN} $(MAKE) down - MONGO_PORT=${TEST_MONGO_PORT} COMPOSE_PROJECT_NAME=${TEST_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${TEST_DOCKER_COMPOSE} HF_ENDPOINT=${TEST_HF_ENDPOINT} HF_TOKEN=${TEST_HF_TOKEN} ROWS_MAX_NUMBER=${TEST_ROWS_MAX_NUMBER} $(MAKE) up - MONGO_URL=${TEST_MONGO_URL} MONGO_QUEUE_DATABASE=${TEST_MONGO_QUEUE_DATABASE} MONGO_CACHE_DATABASE=${TEST_MONGO_CACHE_DATABASE} ROWS_MAX_NUMBER=${TEST_ROWS_MAX_NUMBER} poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests - COMPOSE_PROJECT_NAME=${TEST_COMPOSE_PROJECT_NAME} DOCKER_COMPOSE=${TEST_DOCKER_COMPOSE} HF_ENDPOINT=${TEST_HF_ENDPOINT} HF_TOKEN=${TEST_HF_TOKEN} $(MAKE) down + $(MAKE) down + $(MAKE) up + poetry run python -m pytest -s --cov --cov-report xml:coverage.xml --cov-report=term tests + $(MAKE) down diff --git a/tools/docker-compose-datasets-server-from-local-code.yml b/tools/docker-compose-datasets-server-from-local-code.yml index 5394c13b..faf96e86 100644 --- a/tools/docker-compose-datasets-server-from-local-code.yml +++ b/tools/docker-compose-datasets-server-from-local-code.yml @@ -4 +4 @@ services: - image: nginx:1.20 + image: ${IMAGE_REVERSE_PROXY?IMAGE_REVERSE_PROXY env var must be provided} @@ -7,2 +7 @@ services: - - reverse-proxy-cache:/nginx-cache - - assets:/assets:ro + - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro @@ -13,9 +12,5 @@ services: - - ASSETS_DIRECTORY=/assets - - CACHE_DIRECTORY=/nginx-cache - - CACHE_INACTIVE=24h - - CACHE_MAX_SIZE=1g - - CACHE_ZONE_SIZE=50m - - HOST=localhost - - PORT=80 - - URL_ADMIN=http://admin:8081 - - URL_API=http://api:8080 + ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + HOST: localhost + PORT: 80 + URL_ADMIN: http://admin:${ADMIN_UVICORN_PORT-8081} + URL_API: http://api:${API_UVICORN_PORT-8080} @@ -23,2 +18,34 @@ services: - api: - condition: service_started + - api + - admin + admin: + build: + context: .. 
+ dockerfile: services/admin/Dockerfile + environment: + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + ADMIN_HF_ORGANIZATION: ${ADMIN_HF_ORGANIZATION-} + ADMIN_CACHE_REPORTS_NUM_RESULTS: ${ADMIN_CACHE_REPORTS_NUM_RESULTS-100} + ADMIN_HF_WHOAMI_PATH: ${ADMIN_HF_WHOAMI_PATH-/api/whoami-v2} + ADMIN_MAX_AGE: ${ADMIN_MAX_AGE-10} + ADMIN_PROMETHEUS_MULTIPROC_DIR: ${ADMIN_PROMETHEUS_MULTIPROC_DIR-} + ADMIN_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose + ADMIN_UVICORN_NUM_WORKERS: ${ADMIN_UVICORN_NUM_WORKERS-2} + ADMIN_UVICORN_PORT: ${ADMIN_UVICORN_PORT-8081} + depends_on: + - mongodb + restart: always + ports: + # for debug + - ${ADMIN_UVICORN_PORT-8081}:${ADMIN_UVICORN_PORT-8081} @@ -30 +57 @@ services: - - assets:/assets:ro + - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro @@ -32,7 +59,20 @@ services: - APP_HOSTNAME: 0.0.0.0 - APP_NUM_WORKERS: 1 - APP_PORT: 8080 - ASSETS_DIRECTORY: "/assets" - HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HF_TOKEN} - MONGO_URL: "mongodb://mongodb" + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + API_HF_AUTH_PATH: ${API_HF_AUTH_PATH-/api/datasets/%s/auth-check} + API_MAX_AGE_LONG: ${API_MAX_AGE_LONG-120} + API_MAX_AGE_SHORT: ${API_MAX_AGE_SHORT-10} + API_PROMETHEUS_MULTIPROC_DIR: ${API_PROMETHEUS_MULTIPROC_DIR-} + API_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose + API_UVICORN_NUM_WORKERS: ${API_UVICORN_NUM_WORKERS-2} + API_UVICORN_PORT: ${API_UVICORN_PORT-8080} @@ -41 +81 @@ services: - - ${PORT_API-8080}:8080 + - ${API_UVICORN_PORT-8080}:${API_UVICORN_PORT-8080} @@ -43,2 +83 @@ services: - mongodb: - condition: service_started + - mongodb @@ -49 +88 @@ services: - dockerfile: ./workers/splits/Dockerfile + dockerfile: workers/splits/Dockerfile @@ -51 +90,3 @@ services: - - datasets-cache:/datasets-cache:rw + - splits-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - splits-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw + - 
splits-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw @@ -53,4 +94,16 @@ services: - HF_DATASETS_CACHE: "/datasets-cache" - HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HF_TOKEN} - MONGO_URL: "mongodb://mongodb" + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} + HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} + NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} @@ -58,2 +111 @@ services: - mongodb: - condition: service_started + - mongodb @@ -64 +116 @@ services: - dockerfile: ./workers/first_rows/Dockerfile + dockerfile: workers/first-rows/Dockerfile @@ -66,2 +118,3 @@ services: - - assets:/assets:rw - - datasets-cache:/datasets-cache:rw + - first-rows-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - first-rows-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw + - first-rows-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw @@ -69,23 +122,21 @@ services: - ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" - ASSETS_DIRECTORY: "/assets" - HF_DATASETS_CACHE: "/datasets-cache" - HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HF_TOKEN} - MONGO_URL: "mongodb://mongodb" - depends_on: - mongodb: - condition: service_started - restart: always - admin: - build: - context: .. 
- dockerfile: ./services/admin/Dockerfile - environment: - APP_HOSTNAME: 0.0.0.0 - APP_NUM_WORKERS: 1 - APP_PORT: 8081 - ASSETS_DIRECTORY: "/assets" - MONGO_URL: "mongodb://mongodb" - ports: - # for debug - - ${PORT_ADMIN-8081}:8081 + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} + HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} + NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} + FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE: ${FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE-100_000_000} + FIRST_ROWS_MAX_BYTES: ${FIRST_ROWS_MAX_BYTES-1_000_000} + FIRST_ROWS_MAX_NUMBER: ${FIRST_ROWS_MAX_NUMBER-100} + FIRST_ROWS_MIN_CELL_BYTES: ${FIRST_ROWS_MIN_CELL_BYTES-100} + FIRST_ROWS_MIN_NUMBER: ${FIRST_ROWS_MIN_NUMBER-10} @@ -93,2 +144 @@ services: - mongodb: - condition: service_started + - mongodb @@ -105 +154,0 @@ volumes: - datasets-cache: @@ -107 +156,6 @@ volumes: - reverse-proxy-cache: + splits-datasets-cache: + splits-modules-cache: + splits-numba-cache: + first-rows-datasets-cache: + first-rows-modules-cache: + first-rows-numba-cache: diff --git a/tools/docker-compose-datasets-server-from-remote-images.yml b/tools/docker-compose-datasets-server-from-remote-images.yml index 2e75aa35..15eb2648 100644 --- a/tools/docker-compose-datasets-server-from-remote-images.yml +++ b/tools/docker-compose-datasets-server-from-remote-images.yml @@ -7,2 +7 @@ services: - - reverse-proxy-cache:/nginx-cache - - assets:/assets:ro + - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro @@ -13,5 +12 @@ services: - ASSETS_DIRECTORY: /assets - CACHE_DIRECTORY: /nginx-cache - CACHE_INACTIVE: 24h - CACHE_MAX_SIZE: 1g - CACHE_ZONE_SIZE: 50m + ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} @@ -20,2 +15,2 @@ services: - URL_ADMIN: http://admin:8081 - URL_API: http://api:8080 + URL_ADMIN: http://admin:${ADMIN_UVICORN_PORT-8081} + URL_API: http://api:${API_UVICORN_PORT-8080} @@ -23,0 +19,31 @@ services: + - admin + admin: + image: ${IMAGE_ADMIN?IMAGE_ADMIN env var must be provided} + environment: + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the 
reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + ADMIN_HF_ORGANIZATION: ${ADMIN_HF_ORGANIZATION-} + ADMIN_CACHE_REPORTS_NUM_RESULTS: ${ADMIN_CACHE_REPORTS_NUM_RESULTS-100} + ADMIN_HF_WHOAMI_PATH: ${ADMIN_HF_WHOAMI_PATH-/api/whoami-v2} + ADMIN_MAX_AGE: ${ADMIN_MAX_AGE-10} + ADMIN_PROMETHEUS_MULTIPROC_DIR: ${ADMIN_PROMETHEUS_MULTIPROC_DIR-} + ADMIN_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose + ADMIN_UVICORN_NUM_WORKERS: ${ADMIN_UVICORN_NUM_WORKERS-2} + ADMIN_UVICORN_PORT: ${ADMIN_UVICORN_PORT-8081} + depends_on: + - mongodb + restart: always + ports: + # for debug + - ${ADMIN_UVICORN_PORT-8081}:${ADMIN_UVICORN_PORT-8081} @@ -27 +53 @@ services: - - assets:/assets:ro + - assets:${CACHE_ASSETS_DIRECTORY-/assets}:ro @@ -29,10 +55,20 @@ services: - APP_HOSTNAME: 0.0.0.0 - APP_NUM_WORKERS: 1 - APP_PORT: 8080 - ASSETS_DIRECTORY: "/assets" - HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HF_TOKEN} - # use shorter cache durations for the e2e tests - MAX_AGE_SHORT_SECONDS: 1 - MAX_AGE_LONG_SECONDS: 2 - MONGO_URL: "mongodb://mongodb" + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + API_HF_AUTH_PATH: ${API_HF_AUTH_PATH-/api/datasets/%s/auth-check} + API_MAX_AGE_LONG: ${API_MAX_AGE_LONG-120} + API_MAX_AGE_SHORT: ${API_MAX_AGE_SHORT-10} + API_PROMETHEUS_MULTIPROC_DIR: ${API_PROMETHEUS_MULTIPROC_DIR-} + API_UVICORN_HOSTNAME: 0.0.0.0 # required for docker-compose + API_UVICORN_NUM_WORKERS: ${API_UVICORN_NUM_WORKERS-2} + API_UVICORN_PORT: ${API_UVICORN_PORT-8080} @@ -41 +77 @@ services: - - ${PORT_API-8080}:8080 + - ${API_UVICORN_PORT-8080}:${API_UVICORN_PORT-8080} @@ -48 +84,3 @@ services: - - datasets-cache:/datasets-cache:rw + - splits-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - splits-modules-cache:${HF_MODULES_CACHE-/modules-cache}:rw + - splits-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw @@ -50,5 +88,16 @@ services: - HF_DATASETS_CACHE: "/datasets-cache" - HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HF_TOKEN} - MONGO_URL: "mongodb://mongodb" - WORKER_SLEEP_SECONDS: "1" + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + 
COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} + HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} + NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} @@ -61,2 +110,3 @@ services: - - assets:/assets:rw - - datasets-cache:/datasets-cache:rw + - first-rows-datasets-cache:${HF_DATASETS_CACHE-/datasets-cache}:rw + - first-rows-modules-cache:${HF_DATASETS_CACHE-/modules-cache}:rw + - first-rows-numba-cache:${NUMBA_CACHE_DIR-/numba-cache}:rw @@ -64,8 +114,21 @@ services: - ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" - ASSETS_DIRECTORY: "/assets" - HF_DATASETS_CACHE: "/datasets-cache" - HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HF_TOKEN} - MONGO_URL: "mongodb://mongodb" - ROWS_MAX_NUMBER: ${ROWS_MAX_NUMBER-100} - WORKER_SLEEP_SECONDS: "1" + CACHE_ASSETS_DIRECTORY: ${CACHE_ASSETS_DIRECTORY-/assets} + CACHE_MONGO_URL: ${CACHE_MONGO_URL-mongodb://mongodb} # use mongo container by default + CACHE_MONGO_DATABASE: ${CACHE_MONGO_DATABASE-datasets_server_cache} + QUEUE_MAX_JOBS_PER_DATASET: ${QUEUE_MAX_JOBS_PER_DATASET-1} + QUEUE_MAX_LOAD_PCT: ${QUEUE_MAX_LOAD_PCT-70} + QUEUE_MAX_MEMORY_PCT: ${QUEUE_MAX_MEMORY_PCT-80} + QUEUE_MONGO_URL: ${QUEUE_MONGO_URL-mongodb://mongodb} # use mongo container by default + QUEUE_MONGO_DATABASE: ${QUEUE_MONGO_DATABASE-datasets_server_queue} + QUEUE_SLEEP_SECONDS: ${QUEUE_SLEEP_SECONDS-15} + COMMON_ASSETS_BASE_URL: "http://localhost:${PORT_REVERSE_PROXY-8000}/assets" # hard-coded to work with the reverse-proxy + COMMON_HF_ENDPOINT: ${COMMON_HF_ENDPOINT-https://huggingface.co} + COMMON_HF_TOKEN: ${COMMON_HF_TOKEN-} + COMMON_LOG_LEVEL: ${COMMON_LOG_LEVEL-INFO} + HF_DATASETS_CACHE: ${HF_DATASETS_CACHE-/datasets-cache} + HF_MODULES_CACHE: ${HF_MODULES_CACHE-/modules-cache} + NUMBA_CACHE_DIR: ${NUMBA_CACHE_DIR-/numba-cache} + FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE: ${FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE-100_000_000} + FIRST_ROWS_MAX_BYTES: ${FIRST_ROWS_MAX_BYTES-1_000_000} + FIRST_ROWS_MAX_NUMBER: ${FIRST_ROWS_MAX_NUMBER-100} + FIRST_ROWS_MIN_CELL_BYTES: ${FIRST_ROWS_MIN_CELL_BYTES-100} + FIRST_ROWS_MIN_NUMBER: ${FIRST_ROWS_MIN_NUMBER-10} @@ -75,14 +137,0 @@ services: - admin: - image: ${IMAGE_ADMIN?IMAGE_ADMIN env var must be provided} - environment: - APP_HOSTNAME: 0.0.0.0 - APP_NUM_WORKERS: 1 - APP_PORT: 8081 - ASSETS_DIRECTORY: "/assets" - MONGO_URL: "mongodb://mongodb" - depends_on: - - mongodb - restart: always - ports: - # for debug - - ${PORT_ADMIN-8081}:8081 @@ -98 +146,0 @@ volumes: - datasets-cache: @@ -100 +148,6 @@ volumes: - reverse-proxy-cache: + splits-datasets-cache: + splits-modules-cache: + splits-numba-cache: + first-rows-datasets-cache: + first-rows-modules-cache: + first-rows-numba-cache: diff --git a/workers/first_rows/Dockerfile b/workers/first_rows/Dockerfile index 18853f9d..4cc75d75 100644 --- a/workers/first_rows/Dockerfile +++ b/workers/first_rows/Dockerfile @@ -28 +28 @@ COPY libs/libqueue/dist ./libs/libqueue/dist -COPY libs/libutils/dist ./libs/libutils/dist +COPY libs/libcommon/dist ./libs/libcommon/dist diff --git a/workers/first_rows/Makefile b/workers/first_rows/Makefile index 0ce2c3d8..742e44e2 100644 --- a/workers/first_rows/Makefile +++ b/workers/first_rows/Makefile @@ -2,7 +2,4 @@ -export TEST_MONGO_PORT := 27041 -export 
TEST_MONGO_CACHE_DATABASE := datasets_server_cache_test -export TEST_MONGO_QUEUE_DATABASE := datasets_server_queue_test -export TEST_ROWS_MAX_NUMBER := 5 -export TEST_COMPOSE_PROJECT_NAME := first_rows -export TEST_HF_ENDPOINT := https://hub-ci.huggingface.co -export TEST_HF_TOKEN := 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 +export COMPOSE_PROJECT_NAME := first_rows +export MONGO_PORT := 27041 +export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} +export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT} @@ -10 +7 @@ export TEST_HF_TOKEN := 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 -TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml +DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml @@ -13 +9,0 @@ TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml -# Ensure to specify HF_TOKEN when calling make test, ie HF_TOKEN=hf_app_xxx make test diff --git a/workers/first_rows/README.md b/workers/first_rows/README.md index dcd76c9e..6bd22919 100644 --- a/workers/first_rows/README.md +++ b/workers/first_rows/README.md @@ -7 +7,15 @@ -Set environment variables to configure the following aspects: +The worker can be configured using environment variables. They are grouped by scope. + +### First rows worker + +Set environment variables to configure the first rows worker (`FIRST_ROWS_` prefix): + +- `FIRST_ROWS_FALLBACK_MAX_DATASET_SIZE`: the maximum size in bytes of the dataset to fall back to normal mode if streaming fails. Note that it requires the size to be in the info metadata. Set to `0` to disable the fallback. Defaults to `100_000_000`. +- `FIRST_ROWS_MAX_BYTES`: the max size of the /first-rows endpoint response in bytes. Defaults to `1_000_000` (1 MB). +- `FIRST_ROWS_MAX_NUMBER`: the max number of rows fetched by the worker for the split, and provided in the /first-rows endpoint response. Defaults to `100`. +- `FIRST_ROWS_MIN_CELL_BYTES`: the minimum size in bytes of a cell when truncating the content of a row (see `FIRST_ROWS_MAX_BYTES`). Below this limit, the cell content will not be truncated. Defaults to `100`. +- `FIRST_ROWS_MIN_NUMBER`: the min number of rows fetched by the worker for the split, and provided in the /first-rows endpoint response. Defaults to `10`. + +### Datasets library + +The following environment variables are used to configure two dependencies: the `datasets` and `numba` libraries: @@ -9,2 +22,0 @@ Set environment variables to configure the following aspects: -- `ASSETS_BASE_URL`: base URL for the assets files. It should be set accordingly to the datasets-server domain, eg https://datasets-server.huggingface.co/assets. Defaults to `assets`. -- `ASSETS_DIRECTORY`: directory where the asset files are stored. Defaults to empty, in which case the assets are located in the `datasets_server_assets` subdirectory inside the OS default cache directory. @@ -13,11 +24,0 @@ Set environment variables to configure the following aspects: -- `HF_ENDPOINT`: URL of the HuggingFace Hub. Defaults to `https://huggingface.co`. -- `HF_TOKEN`: App Access Token (ask moonlanding administrators to get one, only the `read` role is required), to access the gated datasets. Defaults to empty. -- `LOG_LEVEL`: log level, among `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. Defaults to `INFO`. -- `MAX_JOBS_PER_DATASET`: the maximum number of started jobs for the same dataset. Defaults to 1. -- `MAX_LOAD_PCT`: the maximum load of the machine (in percentage: the max between the 1m load and the 5m load divided by the number of cpus \*100) allowed to start a job. 
Set to 0 to disable the test. Defaults to 70. -- `MAX_MEMORY_PCT`: the maximum memory (RAM + SWAP) usage of the machine (in percentage) allowed to start a job. Set to 0 to disable the test. Defaults to 80. -- `MAX_SIZE_FALLBACK`: the maximum size in bytes of the dataset to fallback in normal mode if streaming fails. Note that it requires to have the size in the info metadata. Set to `0` to disable the fallback. Defaults to `100_000_000`. -- `MIN_CELL_BYTES`: the minimum size in bytes of a cell when truncating the content of a row (see `ROWS_MAX_BYTES`). Below this limit, the cell content will not be truncated. Defaults to `100`. -- `MONGO_CACHE_DATABASE`: the name of the database used for storing the cache. Defaults to `"datasets_server_cache"`. -- `MONGO_QUEUE_DATABASE`: the name of the database used for storing the queue. Defaults to `"datasets_server_queue"`. -- `MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. @@ -25,4 +26,12 @@ Set environment variables to configure the following aspects: -- `ROWS_MAX_BYTES`: the max size of the /first-rows endpoint response in bytes. Defaults to `1_000_000` (1 MB). -- `ROWS_MAX_NUMBER`: the max number of rows fetched by the worker for the split, and provided in the /first-rows endpoint response. Defaults to `100`. -- `ROWS_MIN_NUMBER`: the min number of rows fetched by the worker for the split, and provided in the /first-rows endpoint response. Defaults to `10`. -- `WORKER_SLEEP_SECONDS`: duration in seconds of a worker wait loop iteration, before checking if resources are available and processing a job if any is available. Note that the worker does not sleep on the first loop after finishing a job. Defaults to `15`. + +### Cache + +See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. + +### Queue + +See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. + +### Common + +See [../../libs/libcommon/README.md](../../libs/libcommon/README.md) for more information about the common configuration. 
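The `FIRST_ROWS_`-prefixed variables documented above are parsed with the `environs` library, as introduced in the config.py rewrite later in this diff. A minimal sketch of that parsing pattern, assuming only that `environs` is installed (the variable names follow the README above; the defaults shown are illustrative):

```python
from environs import Env

# Sketch of prefixed env-var parsing with environs, mirroring the
# FirstRowsConfig pattern from this diff (not the full implementation).
env = Env(expand_vars=True)
with env.prefixed("FIRST_ROWS_"):
    # reads FIRST_ROWS_MAX_BYTES from the environment, else the default
    max_bytes = env.int(name="MAX_BYTES", default=1_000_000)
    # reads FIRST_ROWS_MAX_NUMBER from the environment, else the default
    max_number = env.int(name="MAX_NUMBER", default=100)

print(f"max_bytes={max_bytes}, max_number={max_number}")
```

Setting `FIRST_ROWS_MAX_NUMBER=7` in the environment before running this snippet would print `max_number=7`, which is exactly how the test fixtures in this diff override the worker defaults.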
diff --git a/workers/first_rows/poetry.lock b/workers/first_rows/poetry.lock index 626dcc30..3762689a 100644 --- a/workers/first_rows/poetry.lock +++ b/workers/first_rows/poetry.lock @@ -40,17 +39,0 @@ frozenlist = ">=1.1.0" -[[package]] -name = "anyio" -version = "3.6.1" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -idna = ">=2.8" -sniffio = ">=1.1" - -[package.extras] -doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16)"] - @@ -376 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -382 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", 
"apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -440,0 +424,18 @@ conda = ["pyyaml"] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + @@ -506 +507 @@ name = "fsspec" -version = "2022.8.2" +version = "2022.10.0" @@ -549 +550 @@ name = "gdown" -version = "4.5.1" +version = "4.5.3" @@ -586 +587 @@ name = "google-auth" -version = "2.12.0" +version = "2.13.0" @@ -822 +823 @@ name = "libcache" -version = "0.2.1" +version = "0.3.0" @@ -829,0 +831 @@ appdirs = ">=1.4.4,<2.0.0" +environs = ">=9.5.0,<10.0.0" @@ -836 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.2.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" @@ -845,0 +848,16 @@ python-versions = "*" +[[package]] +name = "libcommon" +version = "0.3.1" +description = "Library for utils, common to all the services and workers" +category = "main" +optional = false +python-versions = "==3.9.6" + +[package.dependencies] +environs = ">=9.5.0,<10.0.0" +orjson = ">=3.6.4,<4.0.0" + +[package.source] +type = "file" +url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" + @@ -848 +866 @@ name = "libqueue" -version = "0.3.2" +version = "0.4.1" @@ -854,0 +873 @@ python-versions = "==3.9.6" +environs = ">=9.5.0,<10.0.0" @@ -862 +881 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.3.2-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" @@ -890,16 +908,0 @@ tests = ["matplotlib (>=3.3.0)", "pytest-mpl", "pytest-cov", "pytest", "contextl -[[package]] -name = "libutils" -version = "0.2.0" -description = "Library for utils" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -orjson = ">=3.6.4,<4.0.0" -starlette = ">=0.16.0,<0.17.0" - -[package.source] -type = "file" -url = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl" - @@ -962,0 +966,17 @@ python-versions = ">=3.7" +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + @@ -1118 +1138 @@ name = "oauthlib" -version = "3.2.1" +version = "3.2.2" @@ -1539,0 +1560,11 @@ six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + @@ -1542 +1573 @@ name = "pytz" -version = "2022.4" +version = "2022.5" @@ -1762,8 +1792,0 @@ python-versions = ">=3.6" -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -category = "main" -optional = false -python-versions = ">=3.7" - @@ -1792,14 +1814,0 @@ python-versions = ">=3.6" -[[package]] -name = "starlette" -version = "0.16.0" -description = "The little ASGI library that shines." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -anyio = ">=3.0.0,<4" - -[package.extras] -full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests", "graphene"] - @@ -2170 +2179 @@ name = "types-urllib3" -version = "1.26.25" +version = "1.26.25.1" @@ -2284 +2293 @@ python-versions = "3.9.6" -content-hash = "79cc470566eb3d8ef81f1f1239de17211b4b2f139951122bb90c9c574cfef35d" +content-hash = "7205adaa92829e625bdb89fa7a67c192aaaadd242cca0d45cb2cf5aed455631d" @@ -2293,4 +2301,0 @@ aiosignal = [ -anyio = [ - {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, - {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, -] @@ -2532,0 +2538 @@ dparse = [] +environs = [] @@ -2617 +2623 @@ libcache = [ - {file = "libcache-0.2.1-py3-none-any.whl", hash = "sha256:62c57b8e12a70241106cd9bcc7b845b40ba5ff9dd6423691de269a42f507943f"}, + {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, @@ -2619,0 +2626,3 @@ libclang = [] +libcommon = [ + {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, +] @@ -2621 +2630 @@ libqueue = [ - {file = "libqueue-0.3.2-py3-none-any.whl", hash = "sha256:1655472f2713ad5f89f819bf513aaf4ec6b6fe03d2858255136e5e2971a6c22f"}, + {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, @@ -2624,3 +2632,0 @@ librosa = [] -libutils = [ - {file = "libutils-0.2.0-py3-none-any.whl", hash = "sha256:a562dd39d4b3c5ab20bb11354e8eaf582d873f0367996df9a4c3c00609f608da"}, -] @@ -2634,0 +2641 @@ markupsafe = [] +marshmallow = [] @@ -3037,0 +3045 @@ python-dateutil = [ +python-dotenv = [] @@ -3110 +3117,0 @@ smmap = [ -sniffio = [] @@ -3116,4 +3122,0 @@ soupsieve = [ -starlette = [ - {file = "starlette-0.16.0-py3-none-any.whl", hash = "sha256:38eb24bf705a2c317e15868e384c1b8a12ca396e5a3c3a003db7e667c43f939f"}, - {file = 
"starlette-0.16.0.tar.gz", hash = "sha256:e1904b5d0007aee24bdd3c43994be9b3b729f4f58e740200de1d623f8c3a8870"}, -] diff --git a/workers/first_rows/pyproject.toml b/workers/first_rows/pyproject.toml index fd856f33..0d137d53 100644 --- a/workers/first_rows/pyproject.toml +++ b/workers/first_rows/pyproject.toml @@ -19,3 +19,3 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.2.1-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.3.2-py3-none-any.whl", develop = false } -libutils = { path = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } diff --git a/workers/first_rows/src/first_rows/asset.py b/workers/first_rows/src/first_rows/asset.py index 335bbc17..fd63269e 100644 --- a/workers/first_rows/src/first_rows/asset.py +++ b/workers/first_rows/src/first_rows/asset.py @@ -7 +7 @@ from pathlib import Path -from typing import List, Tuple, TypedDict +from typing import List, Optional, Tuple, TypedDict @@ -15,2 +14,0 @@ from pydub import AudioSegment # type:ignore -from first_rows.config import ASSETS_DIRECTORY - @@ -23,2 +21,4 @@ ASSET_DIR_MODE = 0o755 -def create_asset_dir(dataset: str, config: str, split: str, row_idx: int, column: str) -> Tuple[Path, str]: - assets_dir = init_assets_dir(ASSETS_DIRECTORY) +def create_asset_dir( + dataset: str, config: str, split: str, row_idx: int, column: str, assets_directory: Optional[str] +) -> Tuple[Path, str]: + assets_dir = init_assets_dir(assets_directory) @@ -45,0 +46 @@ def create_image_file( + assets_directory: Optional[str], @@ -47 +48,3 @@ def create_image_file( - dir_path, url_dir_path = create_asset_dir(dataset, config, split, row_idx, column) + dir_path, url_dir_path = create_asset_dir( + dataset=dataset, config=config, split=split, row_idx=row_idx, column=column, assets_directory=assets_directory + ) @@ -71,0 +75 @@ def create_audio_files( + assets_directory: Optional[str], @@ -75 +79,3 @@ def create_audio_files( - dir_path, url_dir_path = create_asset_dir(dataset, config, split, row_idx, column) + dir_path, url_dir_path = create_asset_dir( + dataset=dataset, config=config, split=split, row_idx=row_idx, column=column, assets_directory=assets_directory + ) diff --git a/workers/first_rows/src/first_rows/config.py b/workers/first_rows/src/first_rows/config.py index 012532b1..b2e60541 100644 --- a/workers/first_rows/src/first_rows/config.py +++ b/workers/first_rows/src/first_rows/config.py @@ -4,2 +3,0 @@ -import os - @@ -8,46 +6,44 @@ from datasets.utils.logging import log_levels, set_verbosity -from libutils.utils import get_int_value, get_str_or_none_value, get_str_value - -from first_rows.constants import ( - DEFAULT_ASSETS_BASE_URL, - DEFAULT_ASSETS_DIRECTORY, - DEFAULT_HF_ENDPOINT, - DEFAULT_HF_TOKEN, - DEFAULT_LOG_LEVEL, - DEFAULT_MAX_JOBS_PER_DATASET, - DEFAULT_MAX_LOAD_PCT, - DEFAULT_MAX_MEMORY_PCT, - DEFAULT_MAX_SIZE_FALLBACK, - DEFAULT_MIN_CELL_BYTES, - DEFAULT_MONGO_CACHE_DATABASE, - DEFAULT_MONGO_QUEUE_DATABASE, - DEFAULT_MONGO_URL, - DEFAULT_ROWS_MAX_BYTES, - DEFAULT_ROWS_MAX_NUMBER, - DEFAULT_ROWS_MIN_NUMBER, - DEFAULT_WORKER_SLEEP_SECONDS, -) - -ASSETS_BASE_URL = get_str_value(d=os.environ, key="ASSETS_BASE_URL", default=DEFAULT_ASSETS_BASE_URL) 
-ASSETS_DIRECTORY = get_str_or_none_value(d=os.environ, key="ASSETS_DIRECTORY", default=DEFAULT_ASSETS_DIRECTORY) -HF_ENDPOINT = get_str_value(d=os.environ, key="HF_ENDPOINT", default=DEFAULT_HF_ENDPOINT) -HF_TOKEN = get_str_or_none_value(d=os.environ, key="HF_TOKEN", default=DEFAULT_HF_TOKEN) -LOG_LEVEL = get_str_value(d=os.environ, key="LOG_LEVEL", default=DEFAULT_LOG_LEVEL) -MAX_JOBS_PER_DATASET = get_int_value(os.environ, "MAX_JOBS_PER_DATASET", DEFAULT_MAX_JOBS_PER_DATASET) -MAX_LOAD_PCT = get_int_value(os.environ, "MAX_LOAD_PCT", DEFAULT_MAX_LOAD_PCT) -MAX_MEMORY_PCT = get_int_value(os.environ, "MAX_MEMORY_PCT", DEFAULT_MAX_MEMORY_PCT) -MAX_SIZE_FALLBACK = get_int_value(os.environ, "MAX_SIZE_FALLBACK", DEFAULT_MAX_SIZE_FALLBACK) -MIN_CELL_BYTES = get_int_value(os.environ, "MIN_CELL_BYTES", DEFAULT_MIN_CELL_BYTES) -MONGO_CACHE_DATABASE = get_str_value(d=os.environ, key="MONGO_CACHE_DATABASE", default=DEFAULT_MONGO_CACHE_DATABASE) -MONGO_QUEUE_DATABASE = get_str_value(d=os.environ, key="MONGO_QUEUE_DATABASE", default=DEFAULT_MONGO_QUEUE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) -ROWS_MAX_BYTES = get_int_value(os.environ, "ROWS_MAX_BYTES", DEFAULT_ROWS_MAX_BYTES) -ROWS_MAX_NUMBER = get_int_value(os.environ, "ROWS_MAX_NUMBER", DEFAULT_ROWS_MAX_NUMBER) -ROWS_MIN_NUMBER = get_int_value(os.environ, "ROWS_MIN_NUMBER", DEFAULT_ROWS_MIN_NUMBER) -WORKER_SLEEP_SECONDS = get_int_value(os.environ, "WORKER_SLEEP_SECONDS", DEFAULT_WORKER_SLEEP_SECONDS) - -# Ensure the datasets library uses the expected HuggingFace endpoint -datasets.config.HF_ENDPOINT = HF_ENDPOINT -# Don't increase the datasets download counts on huggingface.co -datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False -# Set logs from the datasets library to the least verbose -set_verbosity(log_levels["critical"]) +from environs import Env +from libcache.config import CacheConfig +from libcommon.config import CommonConfig +from libqueue.config import QueueConfig + + +class FirstRowsConfig: + fallback_max_dataset_size: int + max_bytes: int + max_number: int + min_cell_bytes: int + min_number: int + + def __init__(self): + env = Env(expand_vars=True) + with env.prefixed("FIRST_ROWS_"): + self.fallback_max_dataset_size = env.int(name="FALLBACK_MAX_DATASET_SIZE", default=100_000_000) + self.max_bytes = env.int(name="MAX_BYTES", default=1_000_000) + self.max_number = env.int(name="MAX_NUMBER", default=100) + self.min_cell_bytes = env.int(name="MIN_CELL_BYTES", default=100) + self.min_number = env.int(name="MIN_NUMBER", default=10) + + +class WorkerConfig: + cache: CacheConfig + common: CommonConfig + first_rows: FirstRowsConfig + queue: QueueConfig + + def __init__(self): + self.cache = CacheConfig() + self.common = CommonConfig() + self.first_rows = FirstRowsConfig() + self.queue = QueueConfig() + self.setup() + + def setup(self): + # Ensure the datasets library uses the expected HuggingFace endpoint + datasets.config.HF_ENDPOINT = self.common.hf_endpoint + datasets.config.HUB_DATASETS_URL = self.common.hf_endpoint + "/datasets/{repo_id}/resolve/{revision}/{path}" + # Don't increase the datasets download counts on huggingface.co + datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False + # Set logs from the datasets library to the least verbose + set_verbosity(log_levels["critical"]) diff --git a/workers/first_rows/src/first_rows/constants.py b/workers/first_rows/src/first_rows/constants.py deleted file mode 100644 index f77d7d3f..00000000 --- a/workers/first_rows/src/first_rows/constants.py +++ 
/dev/null @@ -1,22 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from typing import Optional - -DEFAULT_ASSETS_BASE_URL: str = "assets" -DEFAULT_ASSETS_DIRECTORY: None = None -DEFAULT_HF_ENDPOINT: str = "https://huggingface.co" -DEFAULT_HF_TOKEN: Optional[str] = None -DEFAULT_LOG_LEVEL: str = "INFO" -DEFAULT_MAX_JOBS_PER_DATASET: int = 1 -DEFAULT_MAX_LOAD_PCT: int = 70 -DEFAULT_MAX_MEMORY_PCT: int = 80 -DEFAULT_MAX_SIZE_FALLBACK: int = 100_000_000 -DEFAULT_MIN_CELL_BYTES: int = 100 -DEFAULT_MONGO_CACHE_DATABASE: str = "datasets_server_cache" -DEFAULT_MONGO_QUEUE_DATABASE: str = "datasets_server_queue" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27018" -DEFAULT_ROWS_MAX_BYTES: int = 1_000_000 -DEFAULT_ROWS_MAX_NUMBER: int = 100 -DEFAULT_ROWS_MIN_NUMBER: int = 10 -DEFAULT_WORKER_SLEEP_SECONDS: int = 15 diff --git a/workers/first_rows/src/first_rows/features.py b/workers/first_rows/src/first_rows/features.py index 413f6adf..0008df82 100644 --- a/workers/first_rows/src/first_rows/features.py +++ b/workers/first_rows/src/first_rows/features.py @@ -5 +5 @@ import json -from typing import Any, List, Union +from typing import Any, List, Optional, Union @@ -51,0 +52 @@ def image( + assets_directory: Optional[str], @@ -62,8 +63,9 @@ def image( - dataset, - config, - split, - row_idx, - featureName, - f"{append_hash_suffix('image', json_path)}{ext}", - value, - assets_base_url, + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + column=featureName, + filename=f"{append_hash_suffix('image', json_path)}{ext}", + image=value, + assets_base_url=assets_base_url, + assets_directory=assets_directory, @@ -86,0 +89 @@ def audio( + assets_directory: Optional[str], @@ -102,9 +105,10 @@ def audio( - dataset, - config, - split, - row_idx, - featureName, - array, - sampling_rate, - assets_base_url, - append_hash_suffix("audio", json_path), + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + column=featureName, + array=array, + sampling_rate=sampling_rate, + assets_base_url=assets_base_url, + filename_base=append_hash_suffix("audio", json_path), + assets_directory=assets_directory, @@ -122,0 +127 @@ def get_cell_value( + assets_directory: Optional[str], @@ -129 +134,11 @@ def get_cell_value( - return image(dataset, config, split, row_idx, cell, featureName, assets_base_url, json_path) + return image( + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + value=cell, + featureName=featureName, + assets_base_url=assets_base_url, + assets_directory=assets_directory, + json_path=json_path, + ) @@ -131 +146,11 @@ def get_cell_value( - return audio(dataset, config, split, row_idx, cell, featureName, assets_base_url, json_path) + return audio( + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + value=cell, + featureName=featureName, + assets_base_url=assets_base_url, + assets_directory=assets_directory, + json_path=json_path, + ) @@ -140,9 +165,10 @@ def get_cell_value( - dataset, - config, - split, - row_idx, - subCell, - featureName, - subFieldType, - assets_base_url, - json_path + [idx] if json_path else [idx], + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + cell=subCell, + featureName=featureName, + fieldType=subFieldType, + assets_base_url=assets_base_url, + assets_directory=assets_directory, + json_path=json_path + [idx] if json_path else [idx], @@ -158,9 +184,10 @@ def get_cell_value( - dataset, - config, - split, - row_idx, - subCell, - featureName, - 
fieldType.feature, - assets_base_url, - json_path + [idx] if json_path else [idx], + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + cell=subCell, + featureName=featureName, + fieldType=fieldType.feature, + assets_base_url=assets_base_url, + assets_directory=assets_directory, + json_path=json_path + [idx] if json_path else [idx], @@ -179,9 +206,10 @@ def get_cell_value( - dataset, - config, - split, - row_idx, - subCellItem, - featureName, - fieldType.feature[key], - assets_base_url, - json_path + [key, idx] if json_path else [key, idx], + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + cell=subCellItem, + featureName=featureName, + fieldType=fieldType.feature[key], + assets_base_url=assets_base_url, + assets_directory=assets_directory, + json_path=json_path + [key, idx] if json_path else [key, idx], @@ -200,9 +228,10 @@ def get_cell_value( - dataset, - config, - split, - row_idx, - subCell, - featureName, - fieldType[key], - assets_base_url, - json_path + [key] if json_path else [key], + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + cell=subCell, + featureName=featureName, + fieldType=fieldType[key], + assets_base_url=assets_base_url, + assets_directory=assets_directory, + json_path=json_path + [key] if json_path else [key], diff --git a/workers/first_rows/src/first_rows/main.py b/workers/first_rows/src/first_rows/main.py index 129923dd..6c89f15f 100644 --- a/workers/first_rows/src/first_rows/main.py +++ b/workers/first_rows/src/first_rows/main.py @@ -5,0 +6 @@ from libcache.simple_cache import connect_to_cache +from libcommon.logger import init_logger @@ -7 +7,0 @@ from libqueue.queue import connect_to_queue -from libutils.logger import init_logger @@ -9,18 +9 @@ from libutils.logger import init_logger -from first_rows.config import ( - ASSETS_BASE_URL, - ASSETS_DIRECTORY, - HF_ENDPOINT, - HF_TOKEN, - LOG_LEVEL, - MAX_JOBS_PER_DATASET, - MAX_LOAD_PCT, - MAX_MEMORY_PCT, - MAX_SIZE_FALLBACK, - MONGO_CACHE_DATABASE, - MONGO_QUEUE_DATABASE, - MONGO_URL, - ROWS_MAX_BYTES, - ROWS_MAX_NUMBER, - ROWS_MIN_NUMBER, - WORKER_SLEEP_SECONDS, -) +from first_rows.config import WorkerConfig @@ -30,17 +13,7 @@ if __name__ == "__main__": - init_logger(LOG_LEVEL) - connect_to_cache(database=MONGO_CACHE_DATABASE, host=MONGO_URL) - connect_to_queue(database=MONGO_QUEUE_DATABASE, host=MONGO_URL) - show_assets_dir(ASSETS_DIRECTORY) - FirstRowsWorker( - assets_base_url=ASSETS_BASE_URL, - hf_endpoint=HF_ENDPOINT, - hf_token=HF_TOKEN, - max_size_fallback=MAX_SIZE_FALLBACK, - rows_max_bytes=ROWS_MAX_BYTES, - rows_max_number=ROWS_MAX_NUMBER, - rows_min_number=ROWS_MIN_NUMBER, - max_jobs_per_dataset=MAX_JOBS_PER_DATASET, - max_load_pct=MAX_LOAD_PCT, - max_memory_pct=MAX_MEMORY_PCT, - sleep_seconds=WORKER_SLEEP_SECONDS, - ).loop() + worker_config = WorkerConfig() + init_logger(worker_config.common.log_level) + connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) + connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.queue.mongo_url) + show_assets_dir(assets_directory=worker_config.cache.assets_directory) + + FirstRowsWorker(worker_config).loop() diff --git a/workers/first_rows/src/first_rows/response.py b/workers/first_rows/src/first_rows/response.py index d450b0b0..f5b9e8fb 100644 --- a/workers/first_rows/src/first_rows/response.py +++ b/workers/first_rows/src/first_rows/response.py @@ -20 +20 @@ from huggingface_hub.hf_api import HfApi, RepositoryNotFoundError # type: ignor
-from libutils.utils import orjson_dumps +from libcommon.utils import orjson_dumps @@ -22,2 +21,0 @@ from libutils.utils import orjson_dumps -from first_rows.config import MIN_CELL_BYTES -from first_rows.constants import DEFAULT_ROWS_MAX_BYTES, DEFAULT_ROWS_MAX_NUMBER @@ -110 +108 @@ def truncate_cell(cell: Any, min_cell_bytes: int) -> str: -def truncate_row_item(row_item: RowItem) -> RowItem: +def truncate_row_item(row_item: RowItem, min_cell_bytes: int) -> RowItem: @@ -115 +113 @@ def truncate_row_item(row_item: RowItem) -> RowItem: - if cell_bytes > MIN_CELL_BYTES: + if cell_bytes > min_cell_bytes: @@ -117 +115 @@ def truncate_row_item(row_item: RowItem) -> RowItem: - row[column_name] = truncate_cell(cell, MIN_CELL_BYTES) + row[column_name] = truncate_cell(cell=cell, min_cell_bytes=min_cell_bytes) @@ -125 +123 @@ def truncate_row_item(row_item: RowItem) -> RowItem: -def truncate_row_items(row_items: List[RowItem], rows_max_bytes: int) -> List[RowItem]: +def truncate_row_items(row_items: List[RowItem], min_cell_bytes: int, rows_max_bytes: int) -> List[RowItem]: @@ -134 +132 @@ def truncate_row_items(row_items: List[RowItem], rows_max_bytes: int) -> List[Ro - row_item = truncate_row_item(row_item) + row_item = truncate_row_item(row_item=row_item, min_cell_bytes=min_cell_bytes) @@ -154,0 +153 @@ def create_truncated_row_items( + min_cell_bytes: int, @@ -178 +177 @@ def create_truncated_row_items( - return truncate_row_items(row_items, rows_max_bytes) + return truncate_row_items(row_items=row_items, min_cell_bytes=min_cell_bytes, rows_max_bytes=rows_max_bytes) @@ -196 +195,7 @@ def transform_rows( - dataset: str, config: str, split: str, rows: List[Row], features: Features, assets_base_url: str + dataset: str, + config: str, + split: str, + rows: List[Row], + features: Features, + assets_base_url: str, + assets_directory: Optional[str], @@ -201,8 +206,9 @@ def transform_rows( - dataset, - config, - split, - row_idx, - row[featureName] if featureName in row else None, - featureName, - fieldType, - assets_base_url, + dataset=dataset, + config=config, + split=split, + row_idx=row_idx, + cell=row[featureName] if featureName in row else None, + featureName=featureName, + fieldType=fieldType, + assets_base_url=assets_base_url, + assets_directory=assets_directory, @@ -245,2 +251,2 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, - for config in get_dataset_config_names(dataset, use_auth_token=use_auth_token) - for split in get_dataset_split_names(dataset, config, use_auth_token=use_auth_token) + for config in get_dataset_config_names(path=dataset, use_auth_token=use_auth_token) + for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token) @@ -256,5 +262,7 @@ def get_first_rows_response( - hf_token: Optional[str] = None, - max_size_fallback: Optional[int] = None, - rows_max_bytes: Optional[int] = None, - rows_max_number: Optional[int] = None, - rows_min_number: Optional[int] = None, + hf_token: Optional[str], + min_cell_bytes: int, + max_size_fallback: int, + rows_max_bytes: int, + rows_max_number: int, + rows_min_number: int, + assets_directory: Optional[str], @@ -277 +285 @@ def get_first_rows_response( - hf_token (`str`, *optional*): + hf_token (`str` or `None`): @@ -279,9 +287,8 @@ def get_first_rows_response( - max_size_fallback (`int`, *optional*): - The maximum number of bytes of the split to fallback to normal mode if the streaming mode fails. If None, - it will not fallback to normal mode. Defaults to None. 
- rows_max_bytes (`int`, *optional*): - The maximum number of bytes of the response (else, the response is truncated). Defaults to 1_000_000 bytes. - rows_max_number (`int`, *optional*): - The maximum number of rows of the response. Defaults to 100. - rows_min_number (`int`, *optional*): - The minimum number of rows of the response. Defaults to 0. + max_size_fallback (`int`): + The maximum number of bytes of the split to fallback to normal mode if the streaming mode fails. + rows_max_bytes (`int`): + The maximum number of bytes of the response (else, the response is truncated). + rows_max_number (`int`): + The maximum number of rows of the response. + rows_min_number (`int`): + The minimum number of rows of the response. @@ -313,6 +319,0 @@ def get_first_rows_response( - if rows_max_bytes is None: - rows_max_bytes = DEFAULT_ROWS_MAX_BYTES - if rows_max_number is None: - rows_max_number = DEFAULT_ROWS_MAX_NUMBER - if rows_min_number is None: - rows_min_number = 0 @@ -321 +322 @@ def get_first_rows_response( - HfApi(endpoint=hf_endpoint).dataset_info(dataset, use_auth_token=use_auth_token) + HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) @@ -326 +327 @@ def get_first_rows_response( - split_full_names = get_dataset_split_full_names(dataset, use_auth_token) + split_full_names = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) @@ -356 +357 @@ def get_first_rows_response( - dataset, + path=dataset, @@ -375 +376,6 @@ def get_first_rows_response( - dataset, config, split, streaming=True, rows_max_number=rows_max_number, use_auth_token=use_auth_token + dataset=dataset, + config=config, + split=split, + streaming=True, + rows_max_number=rows_max_number, + use_auth_token=use_auth_token, @@ -378 +384 @@ def get_first_rows_response( - if max_size_fallback is None or info.size_in_bytes is None or info.size_in_bytes > max_size_fallback: + if info.size_in_bytes is None or info.size_in_bytes > max_size_fallback: @@ -385,3 +391,3 @@ def get_first_rows_response( - dataset, - config, - split, + dataset=dataset, + config=config, + split=split, @@ -399 +405,9 @@ def get_first_rows_response( - transformed_rows = transform_rows(dataset, config, split, rows, features, assets_base_url) + transformed_rows = transform_rows( + dataset=dataset, + config=config, + split=split, + rows=rows, + features=features, + assets_base_url=assets_base_url, + assets_directory=assets_directory, + ) @@ -406 +420,9 @@ def get_first_rows_response( - row_items = create_truncated_row_items(dataset, config, split, transformed_rows, rows_max_bytes, rows_min_number) + row_items = create_truncated_row_items( + dataset=dataset, + config=config, + split=split, + rows=transformed_rows, + min_cell_bytes=min_cell_bytes, + rows_max_bytes=rows_max_bytes, + rows_min_number=rows_min_number, + ) diff --git a/workers/first_rows/src/first_rows/utils.py b/workers/first_rows/src/first_rows/utils.py index 660d0d29..afc2e919 100644 --- a/workers/first_rows/src/first_rows/utils.py +++ b/workers/first_rows/src/first_rows/utils.py @@ -10,0 +11 @@ from typing import Literal, Optional +from libcommon.exceptions import CustomError @@ -12 +12,0 @@ from libqueue.queue import Queue -from libutils.exceptions import CustomError diff --git a/workers/first_rows/src/first_rows/worker.py b/workers/first_rows/src/first_rows/worker.py index 7571c9c4..bba181e3 100644 --- a/workers/first_rows/src/first_rows/worker.py +++ b/workers/first_rows/src/first_rows/worker.py @@ -10,0 +11 @@ from libqueue.worker 
import Worker +from first_rows.config import WorkerConfig @@ -25,7 +26 @@ class FirstRowsWorker(Worker): - assets_base_url: str - hf_endpoint: str - hf_token: Optional[str] - max_size_fallback: Optional[int] - rows_max_bytes: Optional[int] - rows_max_number: Optional[int] - rows_min_number: Optional[int] + config: WorkerConfig @@ -33,27 +28,4 @@ class FirstRowsWorker(Worker): - def __init__( - self, - assets_base_url: str, - hf_endpoint: str, - hf_token: Optional[str] = None, - max_size_fallback: Optional[int] = None, - rows_max_bytes: Optional[int] = None, - rows_max_number: Optional[int] = None, - rows_min_number: Optional[int] = None, - max_jobs_per_dataset: Optional[int] = None, - sleep_seconds: Optional[int] = None, - max_memory_pct: Optional[int] = None, - max_load_pct: Optional[int] = None, - ): - super().__init__( - sleep_seconds=sleep_seconds, - max_memory_pct=max_memory_pct, - max_load_pct=max_load_pct, - ) - self._queues = Queues(max_jobs_per_dataset=max_jobs_per_dataset) - self.assets_base_url = assets_base_url - self.hf_endpoint = hf_endpoint - self.hf_token = hf_token - self.max_size_fallback = max_size_fallback - self.rows_max_bytes = rows_max_bytes - self.rows_max_number = rows_max_number - self.rows_min_number = rows_min_number + def __init__(self, worker_config: WorkerConfig): + super().__init__(queue_config=worker_config.queue) + self._queues = Queues(max_jobs_per_dataset=worker_config.queue.max_jobs_per_dataset) + self.config = worker_config @@ -75,10 +47,19 @@ class FirstRowsWorker(Worker): - dataset, - config, - split, - assets_base_url=self.assets_base_url, - hf_endpoint=self.hf_endpoint, - hf_token=self.hf_token, - max_size_fallback=self.max_size_fallback, - rows_max_bytes=self.rows_max_bytes, - rows_max_number=self.rows_max_number, - rows_min_number=self.rows_min_number, + dataset=dataset, + config=config, + split=split, + assets_base_url=self.config.common.assets_base_url, + hf_endpoint=self.config.common.hf_endpoint, + hf_token=self.config.common.hf_token, + min_cell_bytes=self.config.first_rows.min_cell_bytes, + max_size_fallback=self.config.first_rows.fallback_max_dataset_size, + rows_max_bytes=self.config.first_rows.max_bytes, + rows_max_number=self.config.first_rows.max_number, + rows_min_number=self.config.first_rows.min_number, + assets_directory=self.config.cache.assets_directory, + ) + upsert_first_rows_response( + dataset_name=dataset, + config_name=config, + split_name=split, + response=dict(response), + http_status=HTTPStatus.OK, @@ -86 +66,0 @@ class FirstRowsWorker(Worker): - upsert_first_rows_response(dataset, config, split, dict(response), HTTPStatus.OK) @@ -96,7 +76,7 @@ class FirstRowsWorker(Worker): - dataset, - config, - split, - dict(err.as_response()), - err.status_code, - err.code, - dict(err.as_response_with_cause()), + dataset_name=dataset, + config_name=config, + split_name=split, + response=dict(err.as_response()), + http_status=err.status_code, + error_code=err.code, + details=dict(err.as_response_with_cause()), @@ -111,7 +91,7 @@ class FirstRowsWorker(Worker): - dataset, - config, - split, - dict(e.as_response()), - e.status_code, - e.code, - dict(e.as_response_with_cause()), + dataset_name=dataset, + config_name=config, + split_name=split, + response=dict(e.as_response()), + http_status=e.status_code, + error_code=e.code, + details=dict(e.as_response_with_cause()), diff --git a/workers/first_rows/tests/conftest.py b/workers/first_rows/tests/conftest.py index 5010cf61..342de477 100644 --- a/workers/first_rows/tests/conftest.py +++ 
b/workers/first_rows/tests/conftest.py @@ -4 +4 @@ -import os +from pytest import MonkeyPatch, fixture @@ -6 +6 @@ import os -from .utils import HF_ENDPOINT +from first_rows.config import WorkerConfig @@ -12 +12,20 @@ pytest_plugins = ["tests.fixtures.datasets", "tests.fixtures.files", "tests.fixt -os.environ["HF_ENDPOINT"] = HF_ENDPOINT +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(hf_endpoint: str, hf_token: str): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + monkeypatch_session.setenv("COMMON_HF_ENDPOINT", hf_endpoint) + monkeypatch_session.setenv("COMMON_HF_TOKEN", hf_token) + monkeypatch_session.setenv("COMMON_ASSETS_BASE_URL", "http://localhost/assets") + monkeypatch_session.setenv("FIRST_ROWS_MAX_NUMBER", "7") + yield monkeypatch_session + monkeypatch_session.undo() + + +@fixture(scope="session") +def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: + worker_config = WorkerConfig() + if "test" not in worker_config.cache.mongo_database or "test" not in worker_config.queue.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return worker_config diff --git a/workers/first_rows/tests/fixtures/hub.py b/workers/first_rows/tests/fixtures/hub.py index 019f81f1..8a40c2a7 100644 --- a/workers/first_rows/tests/fixtures/hub.py +++ b/workers/first_rows/tests/fixtures/hub.py @@ -10,0 +11 @@ from typing import Any, Dict, Iterable, List, Optional, TypedDict +import datasets.config @@ -29,0 +31,6 @@ CI_HUB_DATASETS_URL = CI_HUB_ENDPOINT + "/datasets/{repo_id}/resolve/{revision}/ +# Ensure the datasets library uses the expected HuggingFace endpoint +datasets.config.HF_ENDPOINT = CI_HUB_ENDPOINT +datasets.config.HUB_DATASETS_URL = CI_HUB_DATASETS_URL +# Don't increase the datasets download counts on huggingface.co +datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False + @@ -109,0 +117,5 @@ def hf_token() -> str: [email protected](scope="session") +def hf_endpoint() -> str: + return CI_HUB_ENDPOINT + + diff --git a/workers/first_rows/tests/test_features.py b/workers/first_rows/tests/test_features.py index 943d8415..7cf6dd8f 100644 --- a/workers/first_rows/tests/test_features.py +++ b/workers/first_rows/tests/test_features.py @@ -11,0 +12 @@ from datasets import Audio, Dataset, Image, Value +from first_rows.config import WorkerConfig @@ -14,2 +14,0 @@ from first_rows.features import get_cell_value -from .utils import ASSETS_BASE_URL - @@ -55 +54,3 @@ from .utils import ASSETS_BASE_URL -def test_value(dataset_type, output_value, output_dtype, datasets) -> None: +def test_value( + dataset_type: str, output_value: Any, output_dtype: str, datasets: Dict[str, Dataset], worker_config: WorkerConfig +) -> None: @@ -60 +61,11 @@ -    value = get_cell_value("dataset", "config", "split", 7, dataset[0]["col"], "col", feature, ASSETS_BASE_URL) + value = get_cell_value( + dataset="dataset", + config="config", + split="split", + row_idx=7, + cell=dataset[0]["col"], + featureName="col", + fieldType=feature, + assets_base_url=worker_config.common.assets_base_url, + assets_directory=worker_config.cache.assets_directory, + ) @@ -280 +291,3 @@ def test_value(dataset_type, output_value, output_dtype, datasets) 
-> None: -def test_others(dataset_type: str, output_value: Any, output_type: Any, datasets: Dict[str, Dataset]) -> None: +def test_others( + dataset_type: str, output_value: Any, output_type: Any, datasets: Dict[str, Dataset], worker_config: WorkerConfig +) -> None: @@ -287 +300,11 @@ def test_others(dataset_type: str, output_value: Any, output_type: Any, datasets - value = get_cell_value("dataset", "config", "split", 7, dataset[0]["col"], "col", feature, ASSETS_BASE_URL) + value = get_cell_value( + dataset="dataset", + config="config", + split="split", + row_idx=7, + cell=dataset[0]["col"], + featureName="col", + fieldType=feature, + assets_base_url=worker_config.common.assets_base_url, + assets_directory=worker_config.cache.assets_directory, + ) diff --git a/workers/first_rows/tests/test_response.py b/workers/first_rows/tests/test_response.py index 3544c879..205deb0b 100644 --- a/workers/first_rows/tests/test_response.py +++ b/workers/first_rows/tests/test_response.py @@ -6 +6 @@ from datasets.packaged_modules import csv -from libutils.exceptions import CustomError +from libcommon.exceptions import CustomError @@ -7,0 +8 @@ from libutils.exceptions import CustomError +from first_rows.config import WorkerConfig @@ -11 +12 @@ from .fixtures.hub import HubDatasets -from .utils import ASSETS_BASE_URL, HF_ENDPOINT, HF_TOKEN, get_default_config_split +from .utils import get_default_config_split @@ -35,0 +37 @@ def test_number_rows( + worker_config: WorkerConfig, @@ -45 +46,0 @@ def test_number_rows( - rows_max_number = 7 @@ -52,4 +53,9 @@ def test_number_rows( - assets_base_url=ASSETS_BASE_URL, - hf_endpoint=HF_ENDPOINT, - hf_token=HF_TOKEN if use_token else None, - rows_max_number=rows_max_number, + assets_base_url=worker_config.common.assets_base_url, + hf_endpoint=worker_config.common.hf_endpoint, + hf_token=worker_config.common.hf_token if use_token else None, + max_size_fallback=worker_config.first_rows.fallback_max_dataset_size, + rows_max_number=worker_config.first_rows.max_number, + rows_min_number=worker_config.first_rows.min_number, + rows_max_bytes=worker_config.first_rows.max_bytes, + min_cell_bytes=worker_config.first_rows.min_cell_bytes, + assets_directory=worker_config.cache.assets_directory, @@ -64,4 +70,9 @@ def test_number_rows( - assets_base_url=ASSETS_BASE_URL, - hf_endpoint=HF_ENDPOINT, - hf_token=HF_TOKEN if use_token else None, - rows_max_number=rows_max_number, + assets_base_url=worker_config.common.assets_base_url, + hf_endpoint=worker_config.common.hf_endpoint, + hf_token=worker_config.common.hf_token if use_token else None, + max_size_fallback=worker_config.first_rows.fallback_max_dataset_size, + rows_max_number=worker_config.first_rows.max_number, + rows_min_number=worker_config.first_rows.min_number, + rows_max_bytes=worker_config.first_rows.max_bytes, + min_cell_bytes=worker_config.first_rows.min_cell_bytes, + assets_directory=worker_config.cache.assets_directory, diff --git a/workers/first_rows/tests/test_worker.py b/workers/first_rows/tests/test_worker.py index 48c1785c..d8d2aff8 100644 --- a/workers/first_rows/tests/test_worker.py +++ b/workers/first_rows/tests/test_worker.py @@ -13,13 +13 @@ from libqueue.queue import _clean_queue_database, connect_to_queue -from first_rows.config import ( - ASSETS_BASE_URL, - HF_ENDPOINT, - HF_TOKEN, - MAX_JOBS_PER_DATASET, - MAX_LOAD_PCT, - MAX_MEMORY_PCT, - MAX_SIZE_FALLBACK, - ROWS_MAX_BYTES, - ROWS_MAX_NUMBER, - ROWS_MIN_NUMBER, - WORKER_SLEEP_SECONDS, -) +from first_rows.config import WorkerConfig @@ -28,6 +16 @@ from 
first_rows.worker import FirstRowsWorker -from .utils import ( - MONGO_CACHE_DATABASE, - MONGO_QUEUE_DATABASE, - MONGO_URL, - get_default_config_split, -) +from .utils import get_default_config_split @@ -37,9 +20,3 @@ from .utils import ( -def safe_guard() -> None: - if "test" not in MONGO_CACHE_DATABASE: - raise ValueError("Test must be launched on a test mongo database") - - [email protected](autouse=True, scope="module") -def client() -> None: - connect_to_cache(database=MONGO_CACHE_DATABASE, host=MONGO_URL) - connect_to_queue(database=MONGO_QUEUE_DATABASE, host=MONGO_URL) +def client(worker_config: WorkerConfig) -> None: + connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) + connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.queue.mongo_url) @@ -55,14 +32,2 @@ def clean_mongo_database() -> None: -def worker() -> FirstRowsWorker: - return FirstRowsWorker( - assets_base_url=ASSETS_BASE_URL, - hf_endpoint=HF_ENDPOINT, - hf_token=HF_TOKEN, - max_size_fallback=MAX_SIZE_FALLBACK, - rows_max_bytes=ROWS_MAX_BYTES, - rows_max_number=ROWS_MAX_NUMBER, - rows_min_number=ROWS_MIN_NUMBER, - max_jobs_per_dataset=MAX_JOBS_PER_DATASET, - max_load_pct=MAX_LOAD_PCT, - max_memory_pct=MAX_MEMORY_PCT, - sleep_seconds=WORKER_SLEEP_SECONDS, - ) +def worker(worker_config: WorkerConfig) -> FirstRowsWorker: + return FirstRowsWorker(worker_config) diff --git a/workers/first_rows/tests/utils.py b/workers/first_rows/tests/utils.py index 2b61cc27..4e3fdff0 100644 --- a/workers/first_rows/tests/utils.py +++ b/workers/first_rows/tests/utils.py @@ -4 +3,0 @@ -import os @@ -7,18 +5,0 @@ from typing import Tuple -from libutils.utils import get_int_value, get_str_value - -DEFAULT_ASSETS_BASE_URL: str = "http://localhost/assets" -DEFAULT_HF_ENDPOINT: str = "https://huggingface.co" -DEFAULT_HF_TOKEN: str = "" -DEFAULT_MONGO_CACHE_DATABASE: str = "datasets_server_cache_test" -DEFAULT_MONGO_QUEUE_DATABASE: str = "datasets_server_queue_test" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27017" -DEFAULT_ROWS_MAX_NUMBER: int = 5 - -ASSETS_BASE_URL = get_str_value(d=os.environ, key="ASSETS_BASE_URL", default=DEFAULT_ASSETS_BASE_URL) -HF_ENDPOINT = get_str_value(d=os.environ, key="HF_ENDPOINT", default=DEFAULT_HF_ENDPOINT) -HF_TOKEN = get_str_value(d=os.environ, key="HF_TOKEN", default=DEFAULT_HF_TOKEN) -MONGO_CACHE_DATABASE = get_str_value(d=os.environ, key="MONGO_CACHE_DATABASE", default=DEFAULT_MONGO_CACHE_DATABASE) -MONGO_QUEUE_DATABASE = get_str_value(d=os.environ, key="MONGO_QUEUE_DATABASE", default=DEFAULT_MONGO_QUEUE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) -ROWS_MAX_NUMBER = get_int_value(d=os.environ, key="ROWS_MAX_NUMBER", default=DEFAULT_ROWS_MAX_NUMBER) - diff --git a/workers/splits/Dockerfile b/workers/splits/Dockerfile index b3ea1aa2..f646a6f1 100644 --- a/workers/splits/Dockerfile +++ b/workers/splits/Dockerfile @@ -28 +28 @@ COPY libs/libqueue/dist ./libs/libqueue/dist -COPY libs/libutils/dist ./libs/libutils/dist +COPY libs/libcommon/dist ./libs/libcommon/dist diff --git a/workers/splits/Makefile b/workers/splits/Makefile index 0f146557..1abf7176 100644 --- a/workers/splits/Makefile +++ b/workers/splits/Makefile @@ -2,7 +2,4 @@ -export TEST_MONGO_PORT := 27040 -export TEST_MONGO_CACHE_DATABASE := datasets_server_cache_test -export TEST_MONGO_QUEUE_DATABASE := datasets_server_queue_test -export TEST_ROWS_MAX_NUMBER := 5 -export TEST_COMPOSE_PROJECT_NAME := splits -export 
TEST_HF_ENDPOINT := https://hub-ci.huggingface.co -export TEST_HF_TOKEN := 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 +export COMPOSE_PROJECT_NAME := splits +export MONGO_PORT := 27040 +export CACHE_MONGO_URL := mongodb://localhost:${MONGO_PORT} +export QUEUE_MONGO_URL := mongodb://localhost:${MONGO_PORT} @@ -10 +7 @@ export TEST_HF_TOKEN := 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 -TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml +DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml @@ -13 +9,0 @@ TEST_DOCKER_COMPOSE := ../../tools/docker-compose-mongo.yml -# Ensure to specify HF_TOKEN when calling make test, ie HF_TOKEN=hf_app_xxx make test diff --git a/workers/splits/README.md b/workers/splits/README.md index 87c5aa77..f87fd5f4 100644 --- a/workers/splits/README.md +++ b/workers/splits/README.md @@ -7 +7,5 @@ -Set environment variables to configure the following aspects: +The worker can be configured using environment variables. They are grouped by scope. + +### Datasets library + +The following environment variables are used to configure two dependencies: the `datasets` and `numba` libraries: @@ -11,9 +14,0 @@ Set environment variables to configure the following aspects: -- `HF_ENDPOINT`: URL of the HuggingFace Hub. Defaults to `https://huggingface.co`. -- `HF_TOKEN`: App Access Token (ask moonlanding administrators to get one, only the `read` role is required), to access the gated datasets. Defaults to empty. -- `LOG_LEVEL`: log level, among `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. Defaults to `INFO`. -- `MAX_JOBS_PER_DATASET`: the maximum number of started jobs for the same dataset. Defaults to 1. -- `MAX_LOAD_PCT`: the maximum load of the machine (in percentage: the max between the 1m load and the 5m load divided by the number of cpus \*100) allowed to start a job. Set to 0 to disable the test. Defaults to 70. -- `MAX_MEMORY_PCT`: the maximum memory (RAM + SWAP) usage of the machine (in percentage) allowed to start a job. Set to 0 to disable the test. Defaults to 80. -- `MONGO_CACHE_DATABASE`: the name of the database used for storing the cache. Defaults to `"datasets_server_cache"`. -- `MONGO_QUEUE_DATABASE`: the name of the database used for storing the queue. Defaults to `"datasets_server_queue"`. -- `MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`. @@ -21 +16,12 @@ Set environment variables to configure the following aspects: -- `WORKER_SLEEP_SECONDS`: duration in seconds of a worker wait loop iteration, before checking if resources are available and processing a job if any is available. Note that the worker does not sleep on the first loop after finishing a job. Defaults to `15`. + +### Cache + +See [../../libs/libcache/README.md](../../libs/libcache/README.md) for more information about the cache configuration. + +### Queue + +See [../../libs/libqueue/README.md](../../libs/libqueue/README.md) for more information about the queue configuration. + +### Common + +See [../../libs/libcommon/README.md](../../libs/libcommon/README.md) for more information about the common configuration. 
diff --git a/workers/splits/poetry.lock b/workers/splits/poetry.lock index 1d27e81f..4481f671 100644 --- a/workers/splits/poetry.lock +++ b/workers/splits/poetry.lock @@ -40,17 +39,0 @@ frozenlist = ">=1.1.0" -[[package]] -name = "anyio" -version = "3.6.1" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -idna = ">=2.8" -sniffio = ">=1.1" - -[package.extras] -doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16)"] - @@ -297 +280 @@ name = "cloudpickle" -version = "2.1.0" +version = "2.2.0" @@ -376 +359 @@ benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "tr -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] +dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] @@ -382 +365 @@ tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[s3,server] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] +tests = 
["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (>=2.0.1)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "faiss-cpu (>=1.6.4)", "fsspec", "lz4", "moto[server,s3] (==2.0.4)", "py7zr", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio (<0.12.0)", "soundfile", "transformers", "zstandard", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "sqlalchemy", "tldextract", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa"] @@ -440,0 +424,18 @@ conda = ["pyyaml"] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] + @@ -506 +507 @@ name = "fsspec" -version = "2022.8.2" +version = "2022.10.0" @@ -549 +550 @@ name = "gdown" -version = "4.5.1" +version = "4.5.3" @@ -586 +587 @@ name = "google-auth" -version = "2.12.0" +version = "2.13.0" @@ -822 +823 @@ name = "libcache" -version = "0.2.1" +version = "0.3.0" @@ -829,0 +831 @@ appdirs = ">=1.4.4,<2.0.0" +environs = ">=9.5.0,<10.0.0" @@ -836 +838 @@ type = "file" -url = "../../libs/libcache/dist/libcache-0.2.1-py3-none-any.whl" +url = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl" @@ -845,0 +848,16 @@ python-versions = "*" +[[package]] +name = "libcommon" +version = "0.3.1" +description = "Library for utils, common to all the services and workers" +category = "main" +optional = false +python-versions = "==3.9.6" + +[package.dependencies] +environs = ">=9.5.0,<10.0.0" +orjson = ">=3.6.4,<4.0.0" + +[package.source] +type = "file" +url = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl" + @@ -848 +866 @@ name = "libqueue" -version = "0.3.2" +version = "0.4.1" @@ -854,0 +873 @@ python-versions = "==3.9.6" +environs = ">=9.5.0,<10.0.0" @@ -862 +881 @@ type = "file" -url = "../../libs/libqueue/dist/libqueue-0.3.2-py3-none-any.whl" +url = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl" @@ -890,16 +908,0 @@ tests = ["matplotlib (>=3.3.0)", "pytest-mpl", "pytest-cov", "pytest", "contextl -[[package]] -name = "libutils" -version = "0.2.0" -description = "Library for utils" -category = "main" -optional = false -python-versions = "==3.9.6" - -[package.dependencies] -orjson = ">=3.6.4,<4.0.0" -starlette = ">=0.16.0,<0.17.0" - -[package.source] -type = "file" -url = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl" - @@ -962,0 +966,17 @@ python-versions = ">=3.7" +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] +lint = ["mypy (==0.971)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + @@ -1118 +1138 @@ name = "oauthlib" -version = "3.2.1" +version = "3.2.2" @@ -1539,0 +1560,11 @@ six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + @@ -1542 +1573 @@ name = "pytz" -version = "2022.4" +version = "2022.5" @@ -1762,8 +1792,0 @@ python-versions = ">=3.6" -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -category = "main" -optional = false -python-versions = ">=3.7" - @@ -1792,14 +1814,0 @@ python-versions = ">=3.6" -[[package]] -name = "starlette" -version = "0.16.0" -description = "The little ASGI library that shines." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -anyio = ">=3.0.0,<4" - -[package.extras] -full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests", "graphene"] - @@ -2170 +2179 @@ name = "types-urllib3" -version = "1.26.25" +version = "1.26.25.1" @@ -2284 +2293 @@ python-versions = "3.9.6" -content-hash = "79cc470566eb3d8ef81f1f1239de17211b4b2f139951122bb90c9c574cfef35d" +content-hash = "7205adaa92829e625bdb89fa7a67c192aaaadd242cca0d45cb2cf5aed455631d" @@ -2293,4 +2301,0 @@ aiosignal = [ -anyio = [ - {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, - {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, -] @@ -2435,4 +2440 @@ click = [ -cloudpickle = [ - {file = "cloudpickle-2.1.0-py3-none-any.whl", hash = "sha256:b5c434f75c34624eedad3a14f2be5ac3b5384774d5b0e3caf905c21479e6c4b1"}, - {file = "cloudpickle-2.1.0.tar.gz", hash = "sha256:bb233e876a58491d9590a676f93c7a5473a08f747d5ab9df7f9ce564b3e7938e"}, -] +cloudpickle = [] @@ -2535,0 +2538 @@ dparse = [] +environs = [] @@ -2620 +2623 @@ libcache = [ - {file = "libcache-0.2.1-py3-none-any.whl", hash = "sha256:62c57b8e12a70241106cd9bcc7b845b40ba5ff9dd6423691de269a42f507943f"}, + {file = "libcache-0.3.0-py3-none-any.whl", hash = "sha256:dcfe41d72e7d69b131f9f1f43ed1c6fbcc6cdfe9e8607fd4f5ac211548e74378"}, @@ -2622,0 +2626,3 @@ libclang = [] +libcommon = [ + {file = "libcommon-0.3.1-py3-none-any.whl", hash = "sha256:0a7c58ef9f4b69ca8ced5c9a0e8e21956b4e4c5f671dcdcc6c33c7123f630caa"}, +] @@ -2624 +2630 @@ libqueue = [ - {file = "libqueue-0.3.2-py3-none-any.whl", hash = "sha256:1655472f2713ad5f89f819bf513aaf4ec6b6fe03d2858255136e5e2971a6c22f"}, + {file = "libqueue-0.4.1-py3-none-any.whl", hash = "sha256:b94d97b3842e5e54b5b2da5cd77f4b5931bdd7980e61c261ebfb8c1a1c8eba7b"}, @@ -2627,3 +2632,0 @@ librosa = [] -libutils = [ - {file = "libutils-0.2.0-py3-none-any.whl", hash = "sha256:a562dd39d4b3c5ab20bb11354e8eaf582d873f0367996df9a4c3c00609f608da"}, -] @@ -2637,0 +2641 @@ markupsafe = [] 
+marshmallow = [] @@ -3040,0 +3045 @@ python-dateutil = [ +python-dotenv = [] @@ -3113 +3117,0 @@ smmap = [ -sniffio = [] @@ -3119,4 +3122,0 @@ soupsieve = [ -starlette = [ - {file = "starlette-0.16.0-py3-none-any.whl", hash = "sha256:38eb24bf705a2c317e15868e384c1b8a12ca396e5a3c3a003db7e667c43f939f"}, - {file = "starlette-0.16.0.tar.gz", hash = "sha256:e1904b5d0007aee24bdd3c43994be9b3b729f4f58e740200de1d623f8c3a8870"}, -] diff --git a/workers/splits/pyproject.toml b/workers/splits/pyproject.toml index 7edc2462..14676c6f 100644 --- a/workers/splits/pyproject.toml +++ b/workers/splits/pyproject.toml @@ -19,3 +19,3 @@ kss = "^2.6.0" -libcache = { path = "../../libs/libcache/dist/libcache-0.2.1-py3-none-any.whl", develop = false } -libqueue = { path = "../../libs/libqueue/dist/libqueue-0.3.2-py3-none-any.whl", develop = false } -libutils = { path = "../../libs/libutils/dist/libutils-0.2.0-py3-none-any.whl", develop = false } +libcache = { path = "../../libs/libcache/dist/libcache-0.3.0-py3-none-any.whl", develop = false } +libcommon = { path = "../../libs/libcommon/dist/libcommon-0.3.1-py3-none-any.whl", develop = false } +libqueue = { path = "../../libs/libqueue/dist/libqueue-0.4.1-py3-none-any.whl", develop = false } diff --git a/workers/splits/src/splits/config.py b/workers/splits/src/splits/config.py index 437d4376..c8d93404 100644 --- a/workers/splits/src/splits/config.py +++ b/workers/splits/src/splits/config.py @@ -4,2 +3,0 @@ -import os - @@ -8 +6,4 @@ from datasets.utils.logging import log_levels, set_verbosity -from libutils.utils import get_int_value, get_str_or_none_value, get_str_value +from libcache.config import CacheConfig +from libcommon.config import CommonConfig +from libqueue.config import QueueConfig + @@ -10,12 +11,4 @@ from libutils.utils import get_int_value, get_str_or_none_value, get_str_value -from splits.constants import ( - DEFAULT_HF_ENDPOINT, - DEFAULT_HF_TOKEN, - DEFAULT_LOG_LEVEL, - DEFAULT_MAX_JOBS_PER_DATASET, - DEFAULT_MAX_LOAD_PCT, - DEFAULT_MAX_MEMORY_PCT, - DEFAULT_MONGO_CACHE_DATABASE, - DEFAULT_MONGO_QUEUE_DATABASE, - DEFAULT_MONGO_URL, - DEFAULT_WORKER_SLEEP_SECONDS, -) +class WorkerConfig: + cache: CacheConfig + common: CommonConfig + queue: QueueConfig @@ -23,10 +16,5 @@ from splits.constants import ( -HF_ENDPOINT = get_str_value(d=os.environ, key="HF_ENDPOINT", default=DEFAULT_HF_ENDPOINT) -HF_TOKEN = get_str_or_none_value(d=os.environ, key="HF_TOKEN", default=DEFAULT_HF_TOKEN) -LOG_LEVEL = get_str_value(d=os.environ, key="LOG_LEVEL", default=DEFAULT_LOG_LEVEL) -MAX_JOBS_PER_DATASET = get_int_value(os.environ, "MAX_JOBS_PER_DATASET", DEFAULT_MAX_JOBS_PER_DATASET) -MAX_LOAD_PCT = get_int_value(os.environ, "MAX_LOAD_PCT", DEFAULT_MAX_LOAD_PCT) -MAX_MEMORY_PCT = get_int_value(os.environ, "MAX_MEMORY_PCT", DEFAULT_MAX_MEMORY_PCT) -MONGO_CACHE_DATABASE = get_str_value(d=os.environ, key="MONGO_CACHE_DATABASE", default=DEFAULT_MONGO_CACHE_DATABASE) -MONGO_QUEUE_DATABASE = get_str_value(d=os.environ, key="MONGO_QUEUE_DATABASE", default=DEFAULT_MONGO_QUEUE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) -WORKER_SLEEP_SECONDS = get_int_value(os.environ, "WORKER_SLEEP_SECONDS", DEFAULT_WORKER_SLEEP_SECONDS) + def __init__(self): + self.cache = CacheConfig() + self.common = CommonConfig() + self.queue = QueueConfig() + self.setup() @@ -34,6 +22,8 @@ WORKER_SLEEP_SECONDS = get_int_value(os.environ, "WORKER_SLEEP_SECONDS", DEFAULT -# Ensure the datasets library uses the expected HuggingFace endpoint 
-datasets.config.HF_ENDPOINT = HF_ENDPOINT -# Don't increase the datasets download counts on huggingface.co -datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False -# Set logs from the datasets library to the least verbose -set_verbosity(log_levels["critical"]) + def setup(self): + # Ensure the datasets library uses the expected HuggingFace endpoint + datasets.config.HF_ENDPOINT = self.common.hf_endpoint + datasets.config.HUB_DATASETS_URL = self.common.hf_endpoint + "/datasets/{repo_id}/resolve/{revision}/{path}" + # Don't increase the datasets download counts on huggingface.co + datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False + # Set logs from the datasets library to the least verbose + set_verbosity(log_levels["critical"]) diff --git a/workers/splits/src/splits/constants.py b/workers/splits/src/splits/constants.py deleted file mode 100644 index 3881fca9..00000000 --- a/workers/splits/src/splits/constants.py +++ /dev/null @@ -1,16 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 The HuggingFace Authors. - -from typing import Optional - -DEFAULT_HF_ENDPOINT: str = "https://huggingface.co" -DEFAULT_HF_TOKEN: Optional[str] = None -DEFAULT_LOG_LEVEL: str = "INFO" -DEFAULT_MAX_JOBS_PER_DATASET: int = 1 -DEFAULT_MAX_LOAD_PCT: int = 70 -DEFAULT_MAX_MEMORY_PCT: int = 80 -DEFAULT_MONGO_CACHE_DATABASE: str = "datasets_server_cache" -DEFAULT_MONGO_QUEUE_DATABASE: str = "datasets_server_queue" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27018" -DEFAULT_WORKER_SLEEP_SECONDS: int = 15 -DEFAULT_WORKER_QUEUE: str = "splits_responses" diff --git a/workers/splits/src/splits/main.py b/workers/splits/src/splits/main.py index 08042af3..978998d6 100644 --- a/workers/splits/src/splits/main.py +++ b/workers/splits/src/splits/main.py @@ -4,0 +5 @@ from libcache.simple_cache import connect_to_cache +from libcommon.logger import init_logger @@ -6 +6,0 @@ from libqueue.queue import connect_to_queue -from libutils.logger import init_logger @@ -8,12 +8 @@ from libutils.logger import init_logger -from splits.config import ( - HF_ENDPOINT, - HF_TOKEN, - LOG_LEVEL, - MAX_JOBS_PER_DATASET, - MAX_LOAD_PCT, - MAX_MEMORY_PCT, - MONGO_CACHE_DATABASE, - MONGO_QUEUE_DATABASE, - MONGO_URL, - WORKER_SLEEP_SECONDS, -) +from splits.config import WorkerConfig @@ -23,11 +12,6 @@ if __name__ == "__main__": - init_logger(LOG_LEVEL) - connect_to_cache(database=MONGO_CACHE_DATABASE, host=MONGO_URL) - connect_to_queue(database=MONGO_QUEUE_DATABASE, host=MONGO_URL) - SplitsWorker( - hf_endpoint=HF_ENDPOINT, - hf_token=HF_TOKEN, - max_jobs_per_dataset=MAX_JOBS_PER_DATASET, - max_load_pct=MAX_LOAD_PCT, - max_memory_pct=MAX_MEMORY_PCT, - sleep_seconds=WORKER_SLEEP_SECONDS, - ).loop() + worker_config = WorkerConfig() + init_logger(worker_config.common.log_level) + connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) + connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.queue.mongo_url) + + SplitsWorker(worker_config).loop() diff --git a/workers/splits/src/splits/response.py b/workers/splits/src/splits/response.py index 8dbe9718..623a7a84 100644 --- a/workers/splits/src/splits/response.py +++ b/workers/splits/src/splits/response.py @@ -41,2 +41,2 @@ def get_dataset_split_full_names(dataset: str, use_auth_token: Union[bool, str, - for config in get_dataset_config_names(dataset, use_auth_token=use_auth_token) - for split in get_dataset_split_names(dataset, config, use_auth_token=use_auth_token) + for config in get_dataset_config_names(path=dataset, 
use_auth_token=use_auth_token) + for split in get_dataset_split_names(path=dataset, config_name=config, use_auth_token=use_auth_token) @@ -77 +77 @@ def get_splits_response( - HfApi(endpoint=hf_endpoint).dataset_info(dataset, use_auth_token=use_auth_token) + HfApi(endpoint=hf_endpoint).dataset_info(repo_id=dataset, use_auth_token=use_auth_token) @@ -82 +82 @@ def get_splits_response( - split_full_names = get_dataset_split_full_names(dataset, use_auth_token) + split_full_names = get_dataset_split_full_names(dataset=dataset, use_auth_token=use_auth_token) diff --git a/workers/splits/src/splits/utils.py b/workers/splits/src/splits/utils.py index b0b7191f..c0727b1e 100644 --- a/workers/splits/src/splits/utils.py +++ b/workers/splits/src/splits/utils.py @@ -7,0 +8 @@ from typing import Literal, Optional +from libcommon.exceptions import CustomError @@ -9 +9,0 @@ from libqueue.queue import Queue -from libutils.exceptions import CustomError diff --git a/workers/splits/src/splits/worker.py b/workers/splits/src/splits/worker.py index b36644bc..2b9a8379 100644 --- a/workers/splits/src/splits/worker.py +++ b/workers/splits/src/splits/worker.py @@ -14,0 +15 @@ from libqueue.worker import Worker +from splits.config import WorkerConfig @@ -27,2 +28 @@ class SplitsWorker(Worker): - hf_endpoint: str - hf_token: Optional[str] + config: WorkerConfig @@ -30,17 +30,4 @@ class SplitsWorker(Worker): - def __init__( - self, - hf_endpoint: str, - hf_token: Optional[str] = None, - max_jobs_per_dataset: Optional[int] = None, - sleep_seconds: Optional[int] = None, - max_memory_pct: Optional[int] = None, - max_load_pct: Optional[int] = None, - ): - super().__init__( - sleep_seconds=sleep_seconds, - max_memory_pct=max_memory_pct, - max_load_pct=max_load_pct, - ) - self._queues = Queues(max_jobs_per_dataset=max_jobs_per_dataset) - self.hf_endpoint = hf_endpoint - self.hf_token = hf_token + def __init__(self, worker_config: WorkerConfig): + super().__init__(queue_config=worker_config.queue) + self._queues = Queues(max_jobs_per_dataset=worker_config.queue.max_jobs_per_dataset) + self.config = worker_config @@ -59,2 +46,4 @@ class SplitsWorker(Worker): - response = get_splits_response(dataset, self.hf_endpoint, self.hf_token) - upsert_splits_response(dataset, dict(response), HTTPStatus.OK) + response = get_splits_response( + dataset=dataset, hf_endpoint=self.config.common.hf_endpoint, hf_token=self.config.common.hf_token + ) + upsert_splits_response(dataset_name=dataset, response=dict(response), http_status=HTTPStatus.OK) @@ -63 +52 @@ class SplitsWorker(Worker): - splits_in_cache = get_dataset_first_rows_response_splits(dataset) + splits_in_cache = get_dataset_first_rows_response_splits(dataset_name=dataset) @@ -67 +56 @@ class SplitsWorker(Worker): - delete_first_rows_responses(d, c, s) + delete_first_rows_responses(dataset_name=d, config_name=c, split_name=s) @@ -81,5 +70,5 @@ class SplitsWorker(Worker): - dataset, - dict(err.as_response()), - err.status_code, - err.code, - dict(err.as_response_with_cause()), + dataset_name=dataset, + response=dict(err.as_response()), + http_status=err.status_code, + error_code=err.code, + details=dict(err.as_response_with_cause()), @@ -92,5 +81,5 @@ class SplitsWorker(Worker): - dataset, - dict(e.as_response()), - e.status_code, - e.code, - dict(e.as_response_with_cause()), + dataset_name=dataset, + response=dict(e.as_response()), + http_status=e.status_code, + error_code=e.code, + details=dict(e.as_response_with_cause()), diff --git a/workers/splits/tests/conftest.py 
b/workers/splits/tests/conftest.py index 5010cf61..86ac4418 100644 --- a/workers/splits/tests/conftest.py +++ b/workers/splits/tests/conftest.py @@ -4 +4 @@ -import os +from pytest import MonkeyPatch, fixture @@ -6 +6 @@ import os -from .utils import HF_ENDPOINT +from splits.config import WorkerConfig @@ -12 +12,18 @@ pytest_plugins = ["tests.fixtures.datasets", "tests.fixtures.files", "tests.fixt -os.environ["HF_ENDPOINT"] = HF_ENDPOINT +# see https://github.com/pytest-dev/pytest/issues/363#issuecomment-406536200 +@fixture(scope="session") +def monkeypatch_session(hf_endpoint: str, hf_token: str): + monkeypatch_session = MonkeyPatch() + monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test") + monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test") + monkeypatch_session.setenv("COMMON_HF_ENDPOINT", hf_endpoint) + monkeypatch_session.setenv("COMMON_HF_TOKEN", hf_token) + yield monkeypatch_session + monkeypatch_session.undo() + + +@fixture(scope="session") +def worker_config(monkeypatch_session: MonkeyPatch) -> WorkerConfig: + worker_config = WorkerConfig() + if "test" not in worker_config.cache.mongo_database or "test" not in worker_config.queue.mongo_database: + raise ValueError("Test must be launched on a test mongo database") + return worker_config diff --git a/workers/splits/tests/fixtures/hub.py b/workers/splits/tests/fixtures/hub.py index 8fdc83e1..e493a1ed 100644 --- a/workers/splits/tests/fixtures/hub.py +++ b/workers/splits/tests/fixtures/hub.py @@ -10,0 +11 @@ from typing import Any, Dict, Iterable, List, Optional, TypedDict +import datasets.config @@ -29,0 +31,6 @@ CI_HUB_DATASETS_URL = CI_HUB_ENDPOINT + "/datasets/{repo_id}/resolve/{revision}/ +# Ensure the datasets library uses the expected HuggingFace endpoint +datasets.config.HF_ENDPOINT = CI_HUB_ENDPOINT +datasets.config.HUB_DATASETS_URL = CI_HUB_DATASETS_URL +# Don't increase the datasets download counts on huggingface.co +datasets.config.HF_UPDATE_DOWNLOAD_COUNTS = False + @@ -109,0 +117,5 @@ def hf_token() -> str: [email protected](scope="session") +def hf_endpoint() -> str: + return CI_HUB_ENDPOINT + + diff --git a/workers/splits/tests/test_response.py b/workers/splits/tests/test_response.py index 4c7d398b..8c131c4d 100644 --- a/workers/splits/tests/test_response.py +++ b/workers/splits/tests/test_response.py @@ -5 +5 @@ import pytest -from libutils.exceptions import CustomError +from libcommon.exceptions import CustomError @@ -6,0 +7 @@ from libutils.exceptions import CustomError +from splits.config import WorkerConfig @@ -10 +10,0 @@ from .fixtures.hub import HubDatasets -from .utils import HF_ENDPOINT, HF_TOKEN @@ -27 +27 @@ def test_get_splits_response_simple_csv( - hub_datasets: HubDatasets, name: str, use_token: bool, error_code: str, cause: str + hub_datasets: HubDatasets, name: str, use_token: bool, error_code: str, cause: str, worker_config: WorkerConfig @@ -32 +32,5 @@ def test_get_splits_response_simple_csv( - splits_response = get_splits_response(dataset, HF_ENDPOINT, HF_TOKEN if use_token else None) + splits_response = get_splits_response( + dataset=dataset, + hf_endpoint=worker_config.common.hf_endpoint, + hf_token=worker_config.common.hf_token if use_token else None, + ) @@ -37 +41,5 @@ def test_get_splits_response_simple_csv( - get_splits_response(dataset, HF_ENDPOINT, HF_TOKEN if use_token else None) + get_splits_response( + dataset=dataset, + hf_endpoint=worker_config.common.hf_endpoint, + hf_token=worker_config.common.hf_token if use_token else None, + 
) diff --git a/workers/splits/tests/test_worker.py b/workers/splits/tests/test_worker.py index a8079e40..b8e6c6e5 100644 --- a/workers/splits/tests/test_worker.py +++ b/workers/splits/tests/test_worker.py @@ -12,8 +12 @@ from libqueue.queue import _clean_queue_database, connect_to_queue -from splits.config import ( - HF_ENDPOINT, - HF_TOKEN, - MAX_JOBS_PER_DATASET, - MAX_LOAD_PCT, - MAX_MEMORY_PCT, - WORKER_SLEEP_SECONDS, -) +from splits.config import WorkerConfig @@ -22,8 +14,0 @@ from splits.worker import SplitsWorker -from .utils import MONGO_CACHE_DATABASE, MONGO_QUEUE_DATABASE, MONGO_URL - - [email protected](autouse=True, scope="module") -def safe_guard() -> None: - if "test" not in MONGO_CACHE_DATABASE: - raise ValueError("Test must be launched on a test mongo database") - @@ -32,3 +17,3 @@ def safe_guard() -> None: -def client() -> None: - connect_to_cache(database=MONGO_CACHE_DATABASE, host=MONGO_URL) - connect_to_queue(database=MONGO_QUEUE_DATABASE, host=MONGO_URL) +def client(worker_config: WorkerConfig) -> None: + connect_to_cache(database=worker_config.cache.mongo_database, host=worker_config.cache.mongo_url) + connect_to_queue(database=worker_config.queue.mongo_database, host=worker_config.queue.mongo_url) @@ -44,9 +29,2 @@ def clean_mongo_database() -> None: -def worker() -> SplitsWorker: - return SplitsWorker( - hf_endpoint=HF_ENDPOINT, - hf_token=HF_TOKEN, - max_jobs_per_dataset=MAX_JOBS_PER_DATASET, - max_load_pct=MAX_LOAD_PCT, - max_memory_pct=MAX_MEMORY_PCT, - sleep_seconds=WORKER_SLEEP_SECONDS, - ) +def worker(worker_config: WorkerConfig) -> SplitsWorker: + return SplitsWorker(worker_config) diff --git a/workers/splits/tests/utils.py b/workers/splits/tests/utils.py index a4094cc6..4e3fdff0 100644 --- a/workers/splits/tests/utils.py +++ b/workers/splits/tests/utils.py @@ -4 +3,0 @@ -import os @@ -7,14 +5,0 @@ from typing import Tuple -from libutils.utils import get_str_value - -DEFAULT_HF_ENDPOINT: str = "https://huggingface.co" -DEFAULT_HF_TOKEN: str = "" -DEFAULT_MONGO_CACHE_DATABASE: str = "datasets_server_cache_test" -DEFAULT_MONGO_QUEUE_DATABASE: str = "datasets_server_queue_test" -DEFAULT_MONGO_URL: str = "mongodb://localhost:27017" - -HF_ENDPOINT = get_str_value(d=os.environ, key="HF_ENDPOINT", default=DEFAULT_HF_ENDPOINT) -HF_TOKEN = get_str_value(d=os.environ, key="HF_TOKEN", default=DEFAULT_HF_TOKEN) -MONGO_CACHE_DATABASE = get_str_value(d=os.environ, key="MONGO_CACHE_DATABASE", default=DEFAULT_MONGO_CACHE_DATABASE) -MONGO_QUEUE_DATABASE = get_str_value(d=os.environ, key="MONGO_QUEUE_DATABASE", default=DEFAULT_MONGO_QUEUE_DATABASE) -MONGO_URL = get_str_value(d=os.environ, key="MONGO_URL", default=DEFAULT_MONGO_URL) -
00edbb39da18379c22965a73cbb99ae7dd96a37b
Sylvain Lesage
2022-10-17T20:00:21
feat: 🎸 remove obsolete DATASETS_REVISION (#611)
diff --git a/chart/docker-images.yaml b/chart/docker-images.yaml index 6b322ed7..25f5a4a7 100644 --- a/chart/docker-images.yaml +++ b/chart/docker-images.yaml @@ -7,2 +7,2 @@ - "splits": "huggingface/datasets-server-workers-splits:sha-e9ce81d", - "firstRows": "huggingface/datasets-server-workers-first_rows:sha-e9ce81d" + "splits": "huggingface/datasets-server-workers-splits:sha-68b31e3", + "firstRows": "huggingface/datasets-server-workers-first_rows:sha-f7cfa4a" diff --git a/chart/templates/worker/first-rows/_container.tpl b/chart/templates/worker/first-rows/_container.tpl index 24bb26f7..6eb244e4 100644 --- a/chart/templates/worker/first-rows/_container.tpl +++ b/chart/templates/worker/first-rows/_container.tpl @@ -11,2 +10,0 @@ - - name: DATASETS_REVISION - value: {{ .Values.worker.firstRows.datasetsRevision | quote }} diff --git a/chart/templates/worker/splits/_container.tpl b/chart/templates/worker/splits/_container.tpl index a9f31f98..8a30a6ad 100644 --- a/chart/templates/worker/splits/_container.tpl +++ b/chart/templates/worker/splits/_container.tpl @@ -7,2 +6,0 @@ - - name: DATASETS_REVISION - value: {{ .Values.worker.splits.datasetsRevision | quote }} diff --git a/chart/values.yaml b/chart/values.yaml index 3a3f9255..f518bd2b 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -119,2 +118,0 @@ worker: - # Git reference for the canonical datasets on https://github.com/huggingface/datasets - datasetsRevision: "main" @@ -149,2 +146,0 @@ worker: - # Git reference for the canonical datasets on https://github.com/huggingface/datasets - datasetsRevision: "main" diff --git a/workers/first_rows/README.md b/workers/first_rows/README.md index 11ecb37e..dcd76c9e 100644 --- a/workers/first_rows/README.md +++ b/workers/first_rows/README.md @@ -11 +10,0 @@ Set environment variables to configure the following aspects: -- `DATASETS_REVISION`: git reference for the canonical datasets on https://github.com/huggingface/datasets. Defaults to `main`. diff --git a/workers/first_rows/src/first_rows/config.py b/workers/first_rows/src/first_rows/config.py index bc8f7893..012532b1 100644 --- a/workers/first_rows/src/first_rows/config.py +++ b/workers/first_rows/src/first_rows/config.py @@ -13 +12,0 @@ from first_rows.constants import ( - DEFAULT_DATASETS_REVISION, @@ -33 +31,0 @@ ASSETS_DIRECTORY = get_str_or_none_value(d=os.environ, key="ASSETS_DIRECTORY", d -DATASETS_REVISION = get_str_value(d=os.environ, key="DATASETS_REVISION", default=DEFAULT_DATASETS_REVISION) @@ -50,3 +47,0 @@ WORKER_SLEEP_SECONDS = get_int_value(os.environ, "WORKER_SLEEP_SECONDS", DEFAULT -# Ensure the datasets library uses the expected revision for canonical datasets -# this one has to be set via an env variable unlike the others - this might be fixed in `datasets` at one point -os.environ["HF_SCRIPTS_VERSION"] = DATASETS_REVISION diff --git a/workers/first_rows/src/first_rows/constants.py b/workers/first_rows/src/first_rows/constants.py index a1a8c612..f77d7d3f 100644 --- a/workers/first_rows/src/first_rows/constants.py +++ b/workers/first_rows/src/first_rows/constants.py @@ -8 +7,0 @@ DEFAULT_ASSETS_DIRECTORY: None = None -DEFAULT_DATASETS_REVISION: str = "main" diff --git a/workers/splits/README.md b/workers/splits/README.md index 30ca1af1..87c5aa77 100644 --- a/workers/splits/README.md +++ b/workers/splits/README.md @@ -9 +8,0 @@ Set environment variables to configure the following aspects: -- `DATASETS_REVISION`: git reference for the canonical datasets on https://github.com/huggingface/datasets. Defaults to `main`. 
diff --git a/workers/splits/src/splits/config.py b/workers/splits/src/splits/config.py index 3865181d..437d4376 100644 --- a/workers/splits/src/splits/config.py +++ b/workers/splits/src/splits/config.py @@ -11 +10,0 @@ from splits.constants import ( - DEFAULT_DATASETS_REVISION, @@ -24 +22,0 @@ from splits.constants import ( -DATASETS_REVISION = get_str_value(d=os.environ, key="DATASETS_REVISION", default=DEFAULT_DATASETS_REVISION) @@ -36,3 +33,0 @@ WORKER_SLEEP_SECONDS = get_int_value(os.environ, "WORKER_SLEEP_SECONDS", DEFAULT -# Ensure the datasets library uses the expected revision for canonical datasets -# this one has to be set via an env variable unlike the others - this might be fixed in `datasets` at one point -os.environ["HF_SCRIPTS_VERSION"] = DATASETS_REVISION diff --git a/workers/splits/src/splits/constants.py b/workers/splits/src/splits/constants.py index f6e3d204..3881fca9 100644 --- a/workers/splits/src/splits/constants.py +++ b/workers/splits/src/splits/constants.py @@ -6 +5,0 @@ from typing import Optional -DEFAULT_DATASETS_REVISION: str = "main"