diff --git a/.gitignore b/.gitignore index 0e92a827194b319ee14fca111b26a3d42ac6c325..d33f921d34d307107506e57631c324602c53ab46 100644 --- a/.gitignore +++ b/.gitignore @@ -1,25 +1,3 @@ # Python __pycache__/* *.pyc - -# cSpell -cspell.json - -# debugfile -.vscode/launch.json - -# tmp files -tmp.py -tmp.png - -# MacOS -.DS_Store - -# tmp files -tmp.py - -## to allow temporary data drops without pushing it to the hub -data/*/tmp/* - -## node_modules -**/node_modules/ \ No newline at end of file diff --git a/.vscode/data/memo/tmp/Corpus-v1.1 b/.vscode/data/memo/tmp/Corpus-v1.1 deleted file mode 160000 index 7205897f1f3ee65e296072f3e96d49488e54e8ce..0000000000000000000000000000000000000000 --- a/.vscode/data/memo/tmp/Corpus-v1.1 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 7205897f1f3ee65e296072f3e96d49488e54e8ce diff --git a/.vscode/settings.json b/.vscode/settings.json index 456f0046d12837d5153c15b0b38f37a5caf02d6f..3e99ede35449c19657050b5668845a4690f035b3 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,7 +1,7 @@ { "python.testing.pytestArgs": [ - "src/tests" + "." ], "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true, + "python.testing.pytestEnabled": true } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 77247710aa071612d17d80af63bd1cf32458bfe7..0000000000000000000000000000000000000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,175 +0,0 @@ - -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - -## [v1.2.12] - 2025-09-16 - -### Added - -- Added dataset: historical-danish-handwriting - -## [v1.2.11 - 2025-09-02 - -### Changed - -- Updated Contributing.md to include the activation of the environment - -### Added - -- Added dataset: wiki-comments - -## [v1.2.10] - 2025-08-18 - -### Changed - -- Updated the wiki, wikibooks, wikisource datasets. -- Changed `wiki` to `wikipedia` -- Fixed rounding error in average token count -- Improved the speed of token counting - -### Added - -- Added `create.py` for wiki, wikibooks, wikisource. - -## [v1.2.9] - 2025-08-05 - -### Docs - -- Average document length now uses tokens instead of characters -- Added vizualization for checking document length in sub datasets -- Changes to `*/descriptive_stats.json`: - - The object no longer includes revision. - - Now include character-level metrics along with minimum and maximum length. Removed average document length as it is computable from existing metrics. -- Removed per-dataset histograms from the main readme. The goal is to avoid loading the entire dataset when updating the readme. This should make it easier for contributors. -- Simplifying PR workflow in `contributing.md` - -### CI -- Fixes bug causing `make update-descriptive-stats` to fail when not having a linear commit history. The script now skips a dataset update based on revision, but only if the `descriptive_stats.json` file does not exist. To ensure that the main readme is always up to date, we change the make command always to update it. - -## [v1.2.8] - 2025-08-05 - -### Added - -- Added dataset: Enevældens Nyheder Online (`enevaeldens_nyheder`). This brings us to >5B tokens! 
- -## [v1.2.7] - 2025-07-22 - -### Added - -- Added dataset: Grundtvigs Works (`grundtvig`) -- Added bias and risk section to the README - -## [v1.2.6] - 2025-07-21 - -### Added - -- Added two table to get an overview of data by license and domain - -### Changed - -- Dataset overview table now appears in a drop down menu - -## [v1.2.5] - 2025-07-08 - -### Added - -- Added the `domsdatabasen` dataset. - -## [v1.2.4] - 2025-07-08 - -### Added - -- Add a plot for tokens over time to see how the dataset develops -- Minor documentation improvements in the main readme - -### Changed - -- Rename `scrape_hovedstaden` to `health_hovedstaden` avoid confusion with its pretty name - -## [v1.2.3] - 2025-06-30 - -### Added - -- Added a `create.py` script for the `retsinformationdk` dataset. - - Resulted in a boost in tokens and documents - -### Changed - -- Did a full stats update on datasets, resulting in minor changes in a few datasheets - -## [v1.2.2] - 2025-06-26 - -### Added - -- Added the new `scrape_hovedstaden` dataset. -- Added a new domain type `Medical`. - -## [v1.2.1] - 2025-06-24 - -### Fixed - -- Updated the danske-taler dataset. This version fixes a problem where the texts from the API contains no newlines, and where there should have been newline there is now space between words and punctuation. - -## [v1.2.0] - 2025-06-23 - -### Fixed - -- Updated the memo dataset, this second version fixed previous [issues](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/67) with the download and processing of the Danish Memo which cut off the text leading to notably smaller documents. - -## [v1.1.1] - 2025-06-16 - -### Added - -- Added tests to ensure that 1 tokens document don't appear in the data. This filtered out 0 documents in total. - -## [v1.1.0] - 2025-04-29 - -### Added - -- Added multiple quality controls - - Removed all empty string - - Removed duplicates across within datasets -- Restructured datasets - - Removed columns from the dataset to make the structure more lightweight, these include domain, metadata, and license. These have been moved to the individual datasheets. It is still possible to filter for license by using the dataset name - - Added column for number of tokens -- For developers - - Restructered CI codebase substantially - - Added `DataSheet` to make CI for convenient - - factored out plots and tables - -### Docs - -- Sorted overview table -- Minor changes to dataset documentation - - -## [v1.0.12] - 2025-05-08 - -### Added - -- Added new datasets - - Norwegian Colossal Corpus (newspapers) (~191.08K tokens) - - Norwegian Colossal Corpus (books) (~531.97M tokens) - - Norwegian Colossal Corpus (maalfrid) (~29.26M tokens) - - Norwegian Colossal Corpus (parliament) (~338.87M tokens) - -## [v1.0.11] - 2025-03-29 - -### Added - -- Added new datasets (more than 1B tokens 🎉) - - AI Aktindsigt - - Cellar - - Danske Taler - - Miljøportalen - - EUR-Lex SUM - - Finansministeriets Udgivelser - -### Docs - -- Sorted main table in readme -- Added Changelog -- Minor changes to dataset documentation diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 6844a561a4f23620131914c4141c1fcaaa81fb02..0000000000000000000000000000000000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,117 +0,0 @@ -## Working with dataset locally - -A huggingface datasets repository is a GitHub repository like any other. 
You can simply download it like so: - -```bash -git clone https://huggingface.co/datasets/danish-foundation-models/danish-dynaword -cd danish-dynaword -git lfs pull # download large files to ensure that tests works -``` - -You can the work with the dataset locally like so: - -```py -from datasets import load_dataset - -name = "../." # instead of "danish-foundation-models/danish-dynaword" -dataset = load_dataset("../.", split="train") -# make transformations here -``` - -> Note: While it is local Huggingface still uses a cache, therefore you might need to reset it after changes have been made to see that it works correctly. You can do this by deleting the cached files which you can locate using `dataset.cache_files`. - -## Adding a new dataset - -To add a new dataset you will have to create a folder under `data/{dataset_name}/`, which should look as follows: - -``` - data/dataset_name - |- dataset_name.md - |- dataset_name.parquet - |- create.py # optional -``` - -The create.py is an optional python script that allow you to recreate the dataset from the source. This is typically to allow us to reproduce the -dataset with fixes or update the dataset to the latest version using an API. - -## Installing dependencies - -This repo comes with a few dependencies you need to install to make this run. It uses a [makefile](https://opensource.com/article/18/8/what-how-makefile) to run commands and a [uv](https://docs.astral.sh/uv/) for package management. Once you have uv installed you can install the dependencies using: - -```bash -make install -``` - -Now you can activate the environment with: - -``` -source .venv/bin/activate -``` - -## Running dataset tests - -This dataset is special as it comes with a test suite, e.g. testing in the ids are unique and that the format is consistent. You can run the suite using - -```bash -make test -``` - -## Submitting a PR - -Creating a PR on Huggingface is a bit different from creating one on Github. - -1) Go to the community tab on huggingface press *new pull request* and choose *on your machine*. Specify the title of the your PR. Then you can simply: - -```bash -git checkout -b {new branch name} -# make your changes here - -# push to hub -# you might need to first login: -# huggingface-cli login -git push origin HEAD:refs/pr/{PR NUMBER} -``` -Where HEAD refers to the current branch. - -Before you make the PR do be sure to make sure that you have completed the checklist below. - -### Making changes to an existing PR - -As a contributor you might need to develop on an existing branch. To do so you you -```bash -# fetch and checkout existing branch: -git fetch origin refs/pr/{PR NUMBER}:pr/{PR NUMBER} -git checkout pr/{PR NUMBER} -# make your changes here - -# push changes -``` - -### Checklist - -- [ ] I have run the test suite using `make test` and all tests pass -- [ ] I have added/changed a dataset: - - [ ] I have updated descriptive statistics using `make update-descriptive-statistics` - - [ ] I have bumped the version use `make bump-version` -- [ ] If I have added a `create.py` script I have added the [script dependencies](https://docs.astral.sh/uv/guides/scripts/#declaring-script-dependencies) required to run that script. 
-- [ ] I have updated the CHANGELOG.md if appropriate - - -### Examples of Previous PRs -To see example PR you can see the following: - -- [Restructuring columns in the dataset](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/11) -- [Adding a new dataset](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/15) -- Updated [dataset description and metadata](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/20) - -## Frequently asked questions - -### Do you accept synthetic dataets - -Yes we do generally accept synthetic datasets since it will likely be a promising research direction for low- to mid-resource languages. -However, you should be aware that synthetic dataset will probably require a more detailed examination and description. -We will for instance examine the quality of the synthetic subset and whether the model used for the creation permits resharing of the synthetic data under permissible licenses. - -### Do you accept non-Danish data - -Generally this repository is intended for Danish text, however quite broadly defined. For instance, we do accept data containing [code-switching](https://www.google.com/search?client=safari&rls=en&q=code+switching&ie=UTF-8&oe=UTF-8) and historical Danish text. diff --git a/README.md b/README.md index b83db27c6ec2a4aacce86e3a11ec155305c109b3..272fc2ced112055b79d43c8d4931e57f97fec0ef 100644 --- a/README.md +++ b/README.md @@ -1,85 +1,10 @@ --- -annotations_creators: -- no-annotation -language_creators: -- crowdsourced -language: -- da -license: cc0-1.0 -multilinguality: -- monolingual -source_datasets: -- original -task_categories: -- text-generation -task_ids: -- language-modeling -tags: -- text-corpus -- continual-development -- community-collaboration -pretty_name: Danish Dynaword +license: other configs: - config_name: default data_files: - split: train - path: data/*/*.parquet -- config_name: ai-aktindsigt - data_files: - - split: train - path: data/ai-aktindsigt/*.parquet -- config_name: cellar - data_files: - - split: train - path: data/cellar/*.parquet -- config_name: enevaeldens_nyheder - data_files: - - split: train - path: data/enevaeldens_nyheder/*.parquet -- config_name: grundtvig - data_files: - - split: train - path: data/grundtvig/*.parquet -- config_name: danske-taler - data_files: - - split: train - path: data/danske-taler/*.parquet -- config_name: ncc_books - data_files: - - split: train - path: data/ncc_books/*.parquet -- config_name: ncc_newspaper - data_files: - - split: train - path: data/ncc_newspaper/*.parquet -- config_name: ncc_maalfrid - data_files: - - split: train - path: data/ncc_maalfrid/*.parquet -- config_name: ncc_parliament - data_files: - - split: train - path: data/ncc_parliament/*.parquet -- config_name: eur-lex-sum-da - data_files: - - split: train - path: data/eur-lex-sum-da/*.parquet -- config_name: miljoeportalen - data_files: - - split: train - path: data/miljoeportalen/*.parquet -- config_name: fm-udgivelser - data_files: - - split: train - path: data/fm-udgivelser/*.parquet -- config_name: memo - data_files: - - split: train - path: data/memo/*.parquet -- config_name: opensubtitles - data_files: - - split: train - path: data/opensubtitles/*.parquet + path: 'data/*/*.parquet' - config_name: retsinformationdk data_files: - split: train @@ -152,303 +77,97 @@ configs: data_files: - split: train path: data/synne/*.parquet -- config_name: wikipedia - data_files: - - split: train - path: 
data/wikipedia/*.parquet -- config_name: wiki-comments +- config_name: wiki data_files: - split: train - path: data/wiki-comments/*.parquet -- config_name: nordjyllandnews - data_files: - - split: train - path: data/nordjyllandnews/*.parquet + path: data/wiki/*.parquet - config_name: relig data_files: - split: train path: data/relig/*.parquet -- config_name: nota - data_files: - - split: train - path: data/nota/*.parquet -- config_name: health_hovedstaden - data_files: - - split: train - path: data/health_hovedstaden/*.parquet -- config_name: domsdatabasen - data_files: - - split: train - path: data/domsdatabasen/*.parquet -- config_name: historical-danish-handwriting - data_files: - - split: train - path: data/historical-danish-handwriting/*.parquet +annotations_creators: +- no-annotation +language_creators: +- crowdsourced +language: +- da +multilinguality: +- monolingual +source_datasets: +- original +task_categories: +- text-generation +task_ids: +- language-modeling +pretty_name: Danish Gigaword language_bcp47: - da - da-bornholm - da-synnejyl --- - - - -# 🧨 Danish Dynaword - +# Danish Gigaword 2 - -| | | -| ------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Version** | 1.2.12 ([Changelog](/CHANGELOG.md)) | -| **Language** | dan, dansk, Danish | -| **License** | Openly Licensed, See the respective dataset | -| **Models** | For model trained used this data see [danish-foundation-models](https://huggingface.co/danish-foundation-models) | -| **Contact** | If you have question about this project please create an issue [here](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions) | +*Version*: 2.0.0 - - - +*License*: See the respective dataset ## Table of Contents -- [🧨 Danish Dynaword](#-danish-dynaword) +- [Danish Gigaword 2](#danish-gigaword-2) - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Loading the dataset](#loading-the-dataset) - - [Languages](#languages) - - [Domains](#domains) - - [Licensing](#licensing) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - - [Curation Rationale](#curation-rationale) - - [Annotations](#annotations) - [Source Data](#source-data) - - [Data Collection and Processing](#data-collection-and-processing) - - [Dataset Statistics](#dataset-statistics) - - [Contributing to the dataset](#contributing-to-the-dataset) - - [Citation Information](#citation-information) - - [License information](#license-information) - - [Personal and Sensitive Information](#personal-and-sensitive-information) - - [Bias, Risks, and Limitations](#bias-risks-and-limitations) - - [Notice and takedown policy](#notice-and-takedown-policy) + - [Additional Information](#additional-information) + - [Citation Information](#citation-information) ## Dataset Description - -- **Number of samples**: 5.61M -- **Number of tokens (Llama 3)**: 5.89B -- **Average document length in tokens (min, max)**: 1.05K (2, 9.81M) - - +This is intended as a second version of the Danish Gigaword corpus. It is intended to be continually updated with new data sources. This is currently a work in progress. ### Dataset Summary -The Danish dynaword is a collection of Danish free-form text datasets from various domains. 
All of the datasets in Danish Dynaword are openly licensed -and deemed permissible for training large language models. - -Danish Dynaword is continually developed, which means that the dataset will actively be updated as new datasets become available. If you would like to contribute a dataset see the [contribute section](#contributing-to-the-dataset). +The Danish Gigaword Corpus contains text spanning several domains and forms. ### Loading the dataset ```py from datasets import load_dataset -name = "danish-foundation-models/danish-dynaword" +name = "danish-foundation-models/danish-gigaword" ds = load_dataset(name, split = "train") sample = ds[1] # see "Data Instances" below -``` -or load it by streaming the data -```py +# or load by streaming the data ds = load_dataset(name, split = "train", streaming=True) -dataset_iter = iter(ds) -sample = next(iter(dataset_iter)) -``` - -You can also load a single subset at a time: -```py -ds = load_dataset(name, "adl", split = "train") +sample = next(iter(ds)) ``` - -As Danish Dynaword is continually expanding and curated you can make sure that you get the same dataset every time by specifying the revision: -You can also load a single subset at a time: -```py -ds = load_dataset(name, revision="{desired revision}") -``` - -### Languages -This dataset includes the following languages: - -- Danish (dan-Latn) as we as the dialects Bornholmsk (dan-Latn-bornholm) and Synderjysk (dan-Latn-synnejyl) - -In addition it likely contains small amounts of English due to code-switching and Norwegian due to the historical relation between the two languages and language misclassificaitons due to their similarity. - -Language is denoted using [BCP-47](https://en.wikipedia.org/wiki/IETF_language_tag), using the langauge code ISO 639-3 and the script code ISO 15924. The third element denote the region variant. - - -### Domains - -This dynaword consist of data from various domains (e.g., legal, books, social media). The following table and figure give an overview of the relative distributions of these domains. To see a full overview of the source check out the [source data section](#source-data) - -
- - - -| Domain | Sources | N. Tokens | -|:-------------|:---------------------------------------------------------------------------------------------------------|:------------| -| Legal | [cellar], [eur-lex-sum-da], [fm-udgivelser], [retsinformationdk], [skat], [retspraksis], [domsdatabasen] | 2.32B | -| News | [enevaeldens_nyheder], [ncc_newspaper], [tv2r], [nordjyllandnews] | 1.09B | -| Books | [grundtvig], [ncc_books], [memo], [adl], [wikibooks], [jvj], [gutenberg], [relig] | 733.92M | -| Conversation | [danske-taler], [opensubtitles], [ep], [ft], [spont], [naat] | 497.09M | -| Social Media | [hest] | 389.32M | -| Other | [ncc_parliament], [dannet], [depbank], [synne], [historical-danish-handwriting] | 345.79M | -| Web | [ai-aktindsigt], [ncc_maalfrid], [miljoeportalen] | 295.87M | -| Encyclopedic | [wikisource], [wikipedia], [wiki-comments] | 185.75M | -| Medical | [health_hovedstaden] | 27.07M | -| Readaloud | [nota] | 7.30M | -| Dialect | [botxt] | 847.97K | -| **Total** | | 5.89B | - -[ai-aktindsigt]: data/ai-aktindsigt/ai-aktindsigt.md -[cellar]: data/cellar/cellar.md -[enevaeldens_nyheder]: data/enevaeldens_nyheder/enevaeldens_nyheder.md -[grundtvig]: data/grundtvig/grundtvig.md -[danske-taler]: data/danske-taler/danske-taler.md -[ncc_books]: data/ncc_books/ncc_books.md -[ncc_newspaper]: data/ncc_newspaper/ncc_newspaper.md -[ncc_maalfrid]: data/ncc_maalfrid/ncc_maalfrid.md -[ncc_parliament]: data/ncc_parliament/ncc_parliament.md -[eur-lex-sum-da]: data/eur-lex-sum-da/eur-lex-sum-da.md -[miljoeportalen]: data/miljoeportalen/miljoeportalen.md -[fm-udgivelser]: data/fm-udgivelser/fm-udgivelser.md -[memo]: data/memo/memo.md -[opensubtitles]: data/opensubtitles/opensubtitles.md -[retsinformationdk]: data/retsinformationdk/retsinformationdk.md -[ep]: data/ep/ep.md -[ft]: data/ft/ft.md -[wikisource]: data/wikisource/wikisource.md -[spont]: data/spont/spont.md -[tv2r]: data/tv2r/tv2r.md -[adl]: data/adl/adl.md -[hest]: data/hest/hest.md -[skat]: data/skat/skat.md -[dannet]: data/dannet/dannet.md -[retspraksis]: data/retspraksis/retspraksis.md -[wikibooks]: data/wikibooks/wikibooks.md -[jvj]: data/jvj/jvj.md -[gutenberg]: data/gutenberg/gutenberg.md -[botxt]: data/botxt/botxt.md -[depbank]: data/depbank/depbank.md -[naat]: data/naat/naat.md -[synne]: data/synne/synne.md -[wikipedia]: data/wikipedia/wikipedia.md -[wiki-comments]: data/wiki-comments/wiki-comments.md -[nordjyllandnews]: data/nordjyllandnews/nordjyllandnews.md -[relig]: data/relig/relig.md -[nota]: data/nota/nota.md -[health_hovedstaden]: data/health_hovedstaden/health_hovedstaden.md -[domsdatabasen]: data/domsdatabasen/domsdatabasen.md -[historical-danish-handwriting]: data/historical-danish-handwriting/historical-danish-handwriting.md - - -
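Individual domains can be assembled by loading their subsets by config name and concatenating them. A minimal sketch, with the subset-to-domain assignments taken from the table above (loading a large domain may take considerable time and disk space):

```py
from datasets import concatenate_datasets, load_dataset

name = "danish-foundation-models/danish-dynaword"
# Illustrative: assemble the Medical and Readaloud domains from their subsets,
# as listed in the domain table above.
subsets = ["health_hovedstaden", "nota"]
ds = concatenate_datasets([load_dataset(name, s, split="train") for s in subsets])
```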
- - -### Licensing - -The following gives an overview of the licensing in the Dynaword. To get the exact license of the individual datasets check out the [overview table](#source-data). -These license is applied to the constituent data, i.e., the text. The collection of datasets (metadata, quality control, etc.) is licensed under [CC-0](https://creativecommons.org/publicdomain/zero/1.0/legalcode.en). - - -| License | Sources | N. Tokens | -|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------| -| CC-BY-SA 4.0 | [cellar], [enevaeldens_nyheder], [eur-lex-sum-da], [fm-udgivelser], [memo], [tv2r], [jvj], [depbank] | 2.41B | -| CC-0 | [grundtvig], [danske-taler], [ncc_books], [ncc_newspaper], [miljoeportalen], [opensubtitles], [ep], [ft], [wikisource], [spont], [adl], [hest], [skat], [retspraksis], [wikibooks], [botxt], [naat], [synne], [wikipedia], [wiki-comments], [nordjyllandnews], [relig], [nota], [health_hovedstaden] | 2.06B | -| Other (No attribution required) | [retsinformationdk], [domsdatabasen] | 904.61M | -| Other (Attribution required) | [ai-aktindsigt], [ncc_maalfrid], [ncc_parliament], [dannet], [gutenberg] | 515.61M | -| CC-BY 4.0 | [historical-danish-handwriting] | 5.20M | -| **Total** | | 5.89B | - -[ai-aktindsigt]: data/ai-aktindsigt/ai-aktindsigt.md -[cellar]: data/cellar/cellar.md -[enevaeldens_nyheder]: data/enevaeldens_nyheder/enevaeldens_nyheder.md -[grundtvig]: data/grundtvig/grundtvig.md -[danske-taler]: data/danske-taler/danske-taler.md -[ncc_books]: data/ncc_books/ncc_books.md -[ncc_newspaper]: data/ncc_newspaper/ncc_newspaper.md -[ncc_maalfrid]: data/ncc_maalfrid/ncc_maalfrid.md -[ncc_parliament]: data/ncc_parliament/ncc_parliament.md -[eur-lex-sum-da]: data/eur-lex-sum-da/eur-lex-sum-da.md -[miljoeportalen]: data/miljoeportalen/miljoeportalen.md -[fm-udgivelser]: data/fm-udgivelser/fm-udgivelser.md -[memo]: data/memo/memo.md -[opensubtitles]: data/opensubtitles/opensubtitles.md -[retsinformationdk]: data/retsinformationdk/retsinformationdk.md -[ep]: data/ep/ep.md -[ft]: data/ft/ft.md -[wikisource]: data/wikisource/wikisource.md -[spont]: data/spont/spont.md -[tv2r]: data/tv2r/tv2r.md -[adl]: data/adl/adl.md -[hest]: data/hest/hest.md -[skat]: data/skat/skat.md -[dannet]: data/dannet/dannet.md -[retspraksis]: data/retspraksis/retspraksis.md -[wikibooks]: data/wikibooks/wikibooks.md -[jvj]: data/jvj/jvj.md -[gutenberg]: data/gutenberg/gutenberg.md -[botxt]: data/botxt/botxt.md -[depbank]: data/depbank/depbank.md -[naat]: data/naat/naat.md -[synne]: data/synne/synne.md -[wikipedia]: data/wikipedia/wikipedia.md -[wiki-comments]: data/wiki-comments/wiki-comments.md -[nordjyllandnews]: data/nordjyllandnews/nordjyllandnews.md -[relig]: data/relig/relig.md -[nota]: data/nota/nota.md -[health_hovedstaden]: data/health_hovedstaden/health_hovedstaden.md -[domsdatabasen]: data/domsdatabasen/domsdatabasen.md -[historical-danish-handwriting]: data/historical-danish-handwriting/historical-danish-handwriting.md - - - - ## Dataset Structure -The dataset contains text from different sources which are thoroughly defined in [Source Data](#source-data). +The dataset contains text from different sources which are thoroughly defined in [Source Data](#source-data). 
See the [homepage](https://gigaword.dk) or [paper](https://aclanthology.org/2021.nodalida-main.46.pdf) for more information. ### Data Instances Each entry in the dataset consists of a single text with associated metadata - ```py { - "id": "digibok_2009033103031", - "text": "P. FR. RIST. OLAF RYES SAGA. OPTEGNELSER, DAGBØGER OG BREVE. DET NORDISKE FORLAG. Denne Bog søger at[...]", - "source": "ncc_books", - "added": "2025-05-08", - "created": "1899-01-01, 1899-12-31", - "token_count": 192301 + 'text': 'Vimoutiers er en kommune i departementet Orne i Basse-Normandie regionen i det nordvestlige Frankrig.\nCykelløbet Paris-Camembert slutter i Vimoutiers.\nHistorie.\nDen 14. juni 1944, under invasionen i Normandiet blev Vimoutiers bombarderet af allierede styrker. Landsbyen blev ødelagt og 220 civile dræbt.\nPersonligheder.\nPolitikeren Joseph Laniel (1889-1975) var født i Vomoutiers.', + 'source': 'wiki', + 'id': 'wiki_366127', + 'added': '2021-03-28', + 'created': '2019-01-01, 2021-01-01', + 'metadata': + {'domain': 'Wiki & Books', + 'license': 'Creative Commons Legal Code\n\nCC0 1.0 Universal', 'source-pretty': 'Wikipedia' + } } ``` @@ -456,13 +175,15 @@ Each entry in the dataset consists of a single text with associated metadata An entry in the dataset consists of the following fields: -- `id` (`str`): An unique identifier for each document. - `text`(`str`): The content of the document. - `source` (`str`): The source of the document (see [Source Data](#source-data)). +- `id` (`str`): An unique identifer for each document. - `added` (`str`): An date for when the document was added to this collection. - `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +- `metadata/license` (`str`): The license of the document. The licenses vary according to the source. +- `metadata/domain` (`str`): The domain of the source +- `metadata/source-pretty` (`str`): The longform version of the short-form source name + ### Data Splits @@ -470,198 +191,128 @@ The entire corpus is provided in the `train` split. ## Dataset Creation -### Curation Rationale - -These datasets were collected and curated with the intention of making openly license Danish data available. While this was collected with the intention of developing language models it is likely to have multiple other uses such as examining language development and differences across domains. - - - -### Annotations - -This data generally contains no annotation besides the metadata attached to each sample such as what domain it belongs to. - ### Source Data - -Below follows a brief overview of the sources in the corpus along with their individual license. To get more information about the individual dataset click the hyperlink in the table. - -
-Overview Table (click to unfold) - -You can learn more about each dataset by pressing the link in the first column. - - -| Source | Description | Domain | N. Tokens | License | -|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------|:------------|:-----------------------| -| [cellar] | The official digital repository for European Union legal documents and open data | Legal | 1.15B | [CC-BY-SA 4.0] | -| [enevaeldens_nyheder] | High quality OCR'd texts from Danish and Norwegian newspapers during the period of constitutional absolutism in Denmark (1660–1849) | News | 1.03B | [CC-BY-SA 4.0] | -| [retsinformationdk] | [retsinformation.dk](https://www.retsinformation.dk) (legal-information.dk) the official legal information system of Denmark | Legal | 818.25M | [Danish Copyright Law] | -| [ncc_books] | Danish books extracted from the [Norwegian Colossal Corpus](https://huggingface.co/datasets/NbAiLab/NCC) derived from OCR | Books | 531.97M | [CC-0] | -| [hest] | Samples from the Danish debate forum www.heste-nettet.dk | Social Media | 389.32M | [CC-0] | -| [ncc_parliament] | Collections from the Norwegian parliament in Danish. Extracted from the [Norwegian Colossal Corpus](https://huggingface.co/datasets/NbAiLab/NCC) derived from ocr | Other | 338.87M | [NLOD 2.0] | -| [opensubtitles] | Danish subsection of [OpenSubtitles](https://opus.nlpl.eu/OpenSubtitles/corpus/version/OpenSubtitles) | Conversation | 271.60M | [CC-0] | -| [wikipedia] | The Danish subsection of [wikipedia](https://en.wikipedia.org/wiki/Main_Page) | Encyclopedic | 173.33M | [CC-0] | -| [ai-aktindsigt] | Multiple web scrapes from municipality websites collected as a part of the [AI-aktindsigt](https://ai-aktindsigt.dk) project | Web | 139.23M | [Apache 2.0] | -| [miljoeportalen] | Data from [Danmarks Miljøportalen](https://www.miljoeportal.dk/om-danmarks-miljoeportal/) (Denmark's Environment Portal) | Web | 127.38M | [CC-0] | -| [skat] | Skat is the Danish tax authority. 
This dataset contains content from its website skat.dk | Legal | 122.11M | [CC-0] | -| [ft] | Records from all meetings of The Danish parliament (Folketinget) in the parliament hall | Conversation | 114.09M | [CC-0] | -| [memo] | The MeMo corpus comprising almost all Danish novels from the period 1870-1899, known as the Modern Breakthrough | Books | 113.74M | [CC-BY-SA 4.0] | -| [ep] | The Danish subsection of [Europarl](https://aclanthology.org/2005.mtsummit-papers.11/) | Conversation | 100.84M | [CC-0] | -| [domsdatabasen] | [Domsdatabasen.dk](https://domsdatabasen.dk/) is a public database containing selected judgments from the Danish courts | Legal | 86.35M | [Danish Copyright Law] | -| [adl] | Danish literature from 1700-2023 from the [Archive for Danish Literature](https://tekster.kb.dk/text?editorial=no&f%5Bsubcollection_ssi%5D%5B%5D=adl&match=one&search_field=Alt) (ADL) | Books | 58.49M | [CC-0] | -| [retspraksis] | Case law or judical practice in Denmark derived from [Retspraksis](https://da.wikipedia.org/wiki/Retspraksis) | Legal | 56.26M | [CC-0] | -| [fm-udgivelser] | The official publication series of the Danish Ministry of Finance containing economic analyses, budget proposals, and fiscal policy documents | Legal | 50.34M | [CC-BY-SA 4.0] | -| [nordjyllandnews] | Articles from the Danish Newspaper [TV2 Nord](https://www.tv2nord.dk) | News | 37.90M | [CC-0] | -| [eur-lex-sum-da] | The Danish subsection of EUR-lex SUM consisting of EU legislation paired with professionally written summaries | Legal | 31.37M | [CC-BY-SA 4.0] | -| [ncc_maalfrid] | Danish content from Norwegian institutions websites | Web | 29.26M | [NLOD 2.0] | -| [health_hovedstaden] | Guidelines and informational documents for healthcare professionals from the Capital Region | Medical | 27.07M | [CC-0] | -| [tv2r] | Contemporary Danish newswire articles published between 2010 and 2019 | News | 21.67M | [CC-BY-SA 4.0] | -| [grundtvig] | The complete collection of [Grundtvig](https://en.wikipedia.org/wiki/N._F._S._Grundtvig) (1783-1872) one of Denmark’s most influential figures | Books | 10.53M | [CC-0] | -| [danske-taler] | Danish Speeches from [dansketaler.dk](https://www.dansketaler.dk) | Conversation | 8.72M | [CC-0] | -| [wikibooks] | The Danish Subsection of [Wikibooks](https://www.wikibooks.org) | Books | 7.63M | [CC-0] | -| [nota] | The text only part of the [Nota lyd- og tekstdata](https://sprogteknologi.dk/dataset/nota-lyd-og-tekstdata) dataset | Readaloud | 7.30M | [CC-0] | -| [gutenberg] | The Danish subsection from Project [Gutenberg](https://www.gutenberg.org) | Books | 6.76M | [Gutenberg] | -| [wikisource] | The Danish subsection of [Wikisource](https://en.wikisource.org/wiki/Main_Page) | Encyclopedic | 6.28M | [CC-0] | -| [wiki-comments] | Text from the comments sections of the Danish Wikipedia | Encyclopedic | 6.14M | [CC-0] | -| [historical-danish-handwriting] | Minutes from City and Parish Council meetings between 1841 and 1939 from [The Historical Danish handwriting dataset](https://huggingface.co/datasets/aarhus-city-archives/historical-danish-handwriting) | Other | 5.20M | [CC-BY 4.0] | -| [jvj] | The works of the Danish author and poet, [Johannes V. 
Jensen](https://da.wikipedia.org/wiki/Johannes_V._Jensen) | Books | 3.55M | [CC-BY-SA 4.0] | -| [spont] | Conversational samples collected as a part of research projects at Aarhus University | Conversation | 1.56M | [CC-0] | -| [dannet] | [DanNet](https://cst.ku.dk/projekter/dannet) is a Danish WordNet | Other | 1.48M | [DanNet 1.0] | -| [relig] | Danish religious text from the 1700-2022 | Books | 1.24M | [CC-0] | -| [ncc_newspaper] | OCR'd Newspapers derived from [NCC](https://huggingface.co/datasets/NbAiLab/NCC) | News | 1.05M | [CC-0] | -| [botxt] | The Bornholmsk Ordbog Dictionary Project | Dialect | 847.97K | [CC-0] | -| [naat] | Danish speeches from 1930-2022 | Conversation | 286.68K | [CC-0] | -| [depbank] | The Danish subsection of the [Universal Dependencies Treebank](https://github.com/UniversalDependencies/UD_Danish-DDT) | Other | 185.45K | [CC-BY-SA 4.0] | -| [synne] | Dataset collected from [synnejysk forening's website](https://www.synnejysk.dk), covering the Danish dialect sønderjysk | Other | 52.02K | [CC-0] | -| **Total** | | | 5.89B | | - -[ai-aktindsigt]: data/ai-aktindsigt/ai-aktindsigt.md -[cellar]: data/cellar/cellar.md -[enevaeldens_nyheder]: data/enevaeldens_nyheder/enevaeldens_nyheder.md -[grundtvig]: data/grundtvig/grundtvig.md -[danske-taler]: data/danske-taler/danske-taler.md -[ncc_books]: data/ncc_books/ncc_books.md -[ncc_newspaper]: data/ncc_newspaper/ncc_newspaper.md -[ncc_maalfrid]: data/ncc_maalfrid/ncc_maalfrid.md -[ncc_parliament]: data/ncc_parliament/ncc_parliament.md -[eur-lex-sum-da]: data/eur-lex-sum-da/eur-lex-sum-da.md -[miljoeportalen]: data/miljoeportalen/miljoeportalen.md -[fm-udgivelser]: data/fm-udgivelser/fm-udgivelser.md -[memo]: data/memo/memo.md -[opensubtitles]: data/opensubtitles/opensubtitles.md -[retsinformationdk]: data/retsinformationdk/retsinformationdk.md -[ep]: data/ep/ep.md -[ft]: data/ft/ft.md -[wikisource]: data/wikisource/wikisource.md -[spont]: data/spont/spont.md -[tv2r]: data/tv2r/tv2r.md -[adl]: data/adl/adl.md -[hest]: data/hest/hest.md -[skat]: data/skat/skat.md -[dannet]: data/dannet/dannet.md -[retspraksis]: data/retspraksis/retspraksis.md -[wikibooks]: data/wikibooks/wikibooks.md -[jvj]: data/jvj/jvj.md -[gutenberg]: data/gutenberg/gutenberg.md -[botxt]: data/botxt/botxt.md -[depbank]: data/depbank/depbank.md -[naat]: data/naat/naat.md -[synne]: data/synne/synne.md -[wikipedia]: data/wikipedia/wikipedia.md -[wiki-comments]: data/wiki-comments/wiki-comments.md -[nordjyllandnews]: data/nordjyllandnews/nordjyllandnews.md -[relig]: data/relig/relig.md -[nota]: data/nota/nota.md -[health_hovedstaden]: data/health_hovedstaden/health_hovedstaden.md -[domsdatabasen]: data/domsdatabasen/domsdatabasen.md -[historical-danish-handwriting]: data/historical-danish-handwriting/historical-danish-handwriting.md - - -[CC-0]: https://creativecommons.org/publicdomain/zero/1.0/legalcode.en -[CC-BY-SA 4.0]: https://creativecommons.org/licenses/by-sa/4.0/deed.en -[CC-BY 4.0]: https://creativecommons.org/licenses/by/4.0/deed.en -[Apache 2.0]: https://www.apache.org/licenses/LICENSE-2.0 -[NLOD 2.0]: ./data/ncc_maalfrid/ncc_maalfrid.md#license-information -[NLOD 2.0]: ./data/ncc_parliament/ncc_parliament.md#license-information -[Danish Copyright Law]: ./data/retsinformationdk/retsinformationdk.md#license-information -[DanNet 1.0]: ./data/dannet/dannet.md#license-information -[Gutenberg]: ./data/gutenberg/gutenberg.md#license-information -[Danish Copyright Law]: ./data/domsdatabasen/domsdatabasen.md#license-information - - -
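Since every sample carries a `token_count` column, per-subset token totals such as those in the table above can be recomputed cheaply. A minimal sketch using one of the smaller subsets:

```py
from datasets import load_dataset

# Recompute the token total for a single subset from its `token_count` column.
ds = load_dataset("danish-foundation-models/danish-dynaword", "dannet", split="train")
print(f"dannet: {sum(ds['token_count']):,} tokens")  # the table above lists ~1.48M
```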
-
-### Data Collection and Processing
-
-Danish Dynaword is continually developed, meaning the dataset is actively updated as new datasets become available. As a result, the size of Dynaword grows over time, as seen in the following plot:
-
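Because the corpus grows between releases, reproducible analyses or training runs should pin the dataset to a fixed revision. A minimal sketch, mirroring the loading example earlier in this README; the revision value is a placeholder for a tag or commit SHA of your choice:

```py
from datasets import load_dataset

name = "danish-foundation-models/danish-dynaword"
# Pin a git revision (tag or commit SHA) so the growing corpus stays fixed.
ds = load_dataset(name, split="train", revision="{desired revision}")  # placeholder
```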
-The data collection and processing vary depending on the dataset and are documented in the individual datasheets, which are linked in the table above. Where possible, the collection is documented both in the datasheet and in a reproducible script (`data/{dataset}/create.py`).
-
-In addition to dataset-specific processing, we run a series of automated quality checks covering formatting (e.g., correctly formatted columns and unique IDs), quality (e.g., duplicate and empty-string detection), and datasheet documentation. These checks guarantee a minimum level of quality and documentation; to leave room for the development of novel cleaning methodologies, we do not apply more extensive cleaning.
-
-### Dataset Statistics
-
-The following plot(s) are intended to give an overview of document length across the various sources.
-
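For illustration, the formatting and quality checks described above can be sketched as follows. This is a simplified stand-in, not the repository's actual test suite (which runs via `make test`):

```py
from datasets import load_dataset

ds = load_dataset("danish-foundation-models/danish-dynaword", "adl", split="train")

ids = ds["id"]
assert len(ids) == len(set(ids)), "IDs must be unique"
assert all(text.strip() for text in ds["text"]), "no empty documents"
assert len(ds["text"]) == len(set(ds["text"])), "no duplicate documents within a subset"
```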
- - - -### Contributing to the dataset - -We welcome contributions to the dataset, including new sources, improved data filtering, and other enhancements. To get started on contributing, please see [the contribution guidelines](CONTRIBUTING.md) - -## Citation Information - -If you use this work, please cite the [scientific article](https://arxiv.org/abs/2508.02271), we recommend citing the following: - -> Enevoldsen, K.C., Jensen, K.N., Kostkan, J., Szab'o, B.I., Kardos, M., Vad, K., Heinsen, J., N'unez, A.B., Barmina, G., Nielsen, J., Larsen, R., Vahlstrup, P.B., Dalum, P.M., Elliott, D., Galke, L., Schneider-Kamp, P., & Nielbo, K.L. (2025). Dynaword: From One-shot to Continuously Developed Datasets. - - -``` -@article{enevoldsen2025dynaword, - title={Dynaword: From One-shot to Continuously Developed Datasets}, - author={Enevoldsen, Kenneth and Jensen, Kristian N{\o}rgaard and Kostkan, Jan and Szab{\'o}, Bal{\'a}zs and Kardos, M{\'a}rton and Vad, Kirten and N{\'u}{\~n}ez, Andrea Blasi and Barmina, Gianluca and Nielsen, Jacob and Larsen, Rasmus and others}, - journal={arXiv preprint arXiv:2508.02271}, - year={2025} +Below follows a brief overview of the sources in the corpus along with their individual license. + +| Source | License | +| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| adl | Creative Commons Legal Code 1.0 Universal | +| botxt | Creative Commons Legal Code 1.0 Universal | +| dannet | [dannet license](https://cst.ku.dk/projekter/dannet/license.txt) | +| depbank | Attribution-ShareAlike 4.0 International | +| ep | Creative Commons Legal Code 1.0 Universal | +| ft | Creative Commons Legal Code 1.0 Universal | +| gutenberg | [gutenberg license](https://www.gutenberg.org/policy/license.html) | +| hest | Creative Commons Legal Code 1.0 Universal | +| jvj | Attribution-ShareAlike 4.0 International | +| naat | Creative Commons Legal Code 1.0 Universal | +| relig | Creative Commons Legal Code 1.0 Universal | +| retsinformationdk | Danish Copyright law at https://www.retsinformation.dk/forms/r0710.aspx?id=164796 states "§ 9. Love, administrative forskrifter, retsafgørelser og lignende offentlige aktstykker er ikke genstand for ophavsret. Stk. 2. Bestemmelsen i stk. 1 gælder ikke for værker, der fremtræder som selvstændige bidrag i de i stk. 1 nævnte aktstykker. Sådanne værker må dog gengives i forbindelse med aktstykket. Retten til videre udnyttelse afhænger af de i øvrigt gældende regler." | +| retspraksis | Creative Commons Legal Code 1.0 Universal | +| skat | Creative Commons Legal Code 1.0 Universal | +| spont | Creative Commons Legal Code 1.0 Universal | +| synne | Creative Commons Legal Code 1.0 Universal | +| tv2r | The owner of this content is TV2 Regionerne, Denmark. 
Creative Commons Attribution 4.0 International | +| wiki | Creative Commons Legal Code 1.0 Universal | +| wikibooks | Creative Commons Legal Code 1.0 Universal | +| wikisource | Creative Commons Legal Code 1.0 Universal | + +These sources corresponds to the following top-level domains in the dataset: +```python +# mapping from domain to top-level domain +domain_mapping_dict = { + "retsinformationdk": "Legal", + "skat": "Legal", + "retspraksis": "Legal", + "hest": "Social Media", + "cc": "Web", + "adl": "Wiki & Books", + "botxt": "Other", + "danavis": "News", + "dannet": "dannet", + "depbank": "Other", + "ep": "Conversation", + "ft": "Conversation", + "gutenberg": "Wiki & Books", + "jvj": "Wiki & Books", + "naat": "Conversation", + "opensub": "Conversation", + "relig": "Wiki & Books", + "spont": "Conversation", + "synne": "Other", + "tv2r": "News", + "wiki": "Wiki & Books", + "wikibooks": "Wiki & Books", + "wikisource": "Wiki & Books", + "twfv19": "Social Media", # not present in this version of the dataset } ``` -Additionally, we recommend citing the relevant source datasets as well. See the individual datasheets for more information. - -## License information - -The license for each constituent dataset is supplied in the [Source data](#source-data) table. This license is applied to the constituent data, i.e., the text. The collection of datasets (metadata, quality control, etc.) is licensed under [CC-0](https://creativecommons.org/publicdomain/zero/1.0/legalcode.en). +And the following mapping translates between the short form and the long form of the source name +```python +# mapping from domain to its long name format +longname_mapping_dict = { + "retsinformationdk": "retsinformation.dk (Danish legal information)", + "skat": "Skat (Danish tax authority)", + "retspraksis": "retspraksis (Danish legal information)", + "hest": "Hestenettet (Danish debate forum)", + "cc": "Common Crawl", + "adl": " Archive for Danish Literature", + "botxt": "Bornholmsk (Danish dialect)", + "danavis": "Danish daily newspapers", + "dannet": "DanNet (Danish WordNet)", + "depbank": "Danish Dependency Treebank", + "ep": "European Parliament", + "ft": "Folketinget (Danish Parliament)", + "gutenberg": "Gutenberg", + "jvj": "Johannes V. Jensen (Danish author/poet)", + "naat": "NAAT", + "opensub": "Open Subtitles", + "relig": "Religious texts", + "spont": "Spontaneous speech", + "synne": "Synderjysk (Danish dialect)", + "tv2r": "TV 2 Radio (Danish news)", + "wiki": "Wikipedia", + "wikibooks": "Wikibooks", + "wikisource": "Wikisource", + "twfv19": "Twitter Folketingsvalget 2019 (Danish election tweets)", # not present in this version of the dataset +} +``` -### Personal and Sensitive Information +## Additional Information -As far as we are aware the dataset does not contain information identifying sexual orientation, political beliefs, religion, or health connected with utterer ID. In case that such information is present in the data we have been removed utterer information from social media content. -### Bias, Risks, and Limitations +### Citation Information -Certain works in this collection are historical works and thus reflect the linguistic, cultural, and ideological norms of their time. -As such, it includes perspectives, assumptions, and biases characteristic of the period. For instance, the works of N.F.S. Grundtvig (`grundtvig`) were known to nationalistic views and critical stances toward specific groups, such as Germans, which may be considered offensive or exclusionary by contemporary standards. 
+The original version of the Danish Gigaword corpus was created as a part of the following publication.

> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).

-### Notice and takedown policy
-We redistribute files shared with us under a license permitting such redistribution. If you have concerns about the licensing of these files, please [contact us](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/new). If you consider that the data contains material that infringes your copyright, please:
-- Clearly identify yourself, with detailed contact information such as an address, a telephone number, or an email address at which you can be contacted.
-- Clearly reference the original work claimed to be infringed.
-- Clearly identify the material claimed to be infringing, and provide information reasonably sufficient to allow us to locate the material.
-You can contact us through this channel.
-We will comply with legitimate requests by removing the affected sources from the next release of the corpus.
+
+```
+@inproceedings{dagw,
+  title = {{The Danish Gigaword Corpus}},
+  author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
+  year = 2021,
+  booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
+  publisher = {NEALT}
+}
+```
----

-A Danish Foundation Models dataset

+ \ No newline at end of file diff --git a/data/adl/adl.md b/data/adl/adl.md index a820ea58764dfba4aeb7fbc3824aed68514aa162..ff296079e753ef4c0c0788b4d6488635658befe7 100644 --- a/data/adl/adl.md +++ b/data/adl/adl.md @@ -1,99 +1,57 @@ --- -pretty_name: Archive for Danish Literature +pretty_name: Archive for Danish Literature language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Books + - language-modeling --- - -# Dataset Card for Archive for Danish Literature - +# Dataset Card for Archive for Danish Literature ## Dataset Description - - -Danish literature from 1700-2023 from the [Archive for Danish Literature](https://tekster.kb.dk/text?editorial=no&f%5Bsubcollection_ssi%5D%5B%5D=adl&match=one&search_field=Alt) (ADL). - - -Archive for Danish Literature (ADL) is a literary-historical collection of selected parts of older Danish literature, from the Middle Ages up to the mid-20th century. -It provides access to both the texts and introductory material on most of the authors. ADL is a resource for research, teaching, and broad dissemination of older Danish -literature. Currently, ADL contains works by 78 authors. The texts are reproduced from standard printed editions. All texts are searchable, and many can also be viewed as facsimiles (photographs of the original edition) -on the Danish Royal Library's [website](https://tekster.kb.dk/text?editorial=no&f%5Bsubcollection_ssi%5D%5B%5D=adl&match=one&search_field=Alt). - -See also dataset [entry](https://sprogteknologi.dk/dataset/public-adl-text-sources) on sprogteknologi.dk and an [API](https://rawgit.com/Det-Kongelige-Bibliotek/access-digital-objects/master/form-demos/adl-form.html). - - -- **Number of samples**: 498 -- **Number of tokens (Llama 3)**: 58.49M -- **Average document length in tokens (min, max)**: 117.46K (53, 662.14K) - - - - -## Dataset Structure +- **Number of records:** 498 +- **Languages:** Danish +## Dataset Sturcture An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "adl_aakjaer06val", - "text": "SAMLEDE VÆRKER\n\nJEPPE AAKJÆR GYLDENDALSKE BOGHANDEL - NORDISK FORLAG KJØBENHAVN OG\nKRISTIANIA 1919 0[...]", - "source": "adl", - "added": "2020-09-14", - "created": "1700-01-01, 2022-01-01", - "token_count": 439908 + 'text': 'SAMLEDE VÆRKER + +JEPPE AAKJÆR GYLDENDALSKE BOGHANDE', + 'source': 'adl', + 'id': 'adl_aakjaer06val', + 'added': '2020-09-14', + 'created': '1700-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Wiki & Books', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': ' Archive for Danish Literature' + } } ``` -### Data Fields - -An entry in the dataset consists of the following fields: +## Data Fields -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. 
+- **added**: timestamp for when this data was acquired.
+- **created**: timestamp for when the original document was created (best guess if not available).
+- **metadata**: source-specific metadata.
+
+## License Information
+
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code - -### Dataset Statistics - - -

- +CC0 1.0 Universal

-
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish Gigaword](https://huggingface.co/danish-foundation-models) corpus. We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-  title = {{The Danish Gigaword Corpus}},
-  author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-  year = 2021,
-  booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-  publisher = {NEALT}
-}
-```
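As noted in the main README, a subset documented by a datasheet such as this one can also be loaded on its own by its config name. A minimal sketch:

```py
from datasets import load_dataset

# Load only the ADL subset described in this datasheet.
ds = load_dataset("danish-foundation-models/danish-dynaword", "adl", split="train")
print(ds[0]["id"])  # e.g. "adl_aakjaer06val"
```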
diff --git a/data/adl/adl.parquet b/data/adl/adl.parquet index 7908766197c5434c18f289ab0a275ca42c9ce201..139ec0afee98b3bcccc34e88c1543288ce6a3533 100644 --- a/data/adl/adl.parquet +++ b/data/adl/adl.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7511f1ff1da6a3c04148ca5bd0395d9e2e702520b0c0bf3c8774428b5dc27f7f -size 106403262 +oid sha256:d51c291d1cf6461a1e59dd45dfd63ee39a5c62cd3c2fd05877489d50aaa5115e +size 106409966 diff --git a/data/adl/descriptive_stats.json b/data/adl/descriptive_stats.json deleted file mode 100644 index f72249488e841fe1c9f5864bd3b4600df0381fb7..0000000000000000000000000000000000000000 --- a/data/adl/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 498, - "number_of_tokens": 58493311, - "min_length_tokens": 53, - "max_length_tokens": 662143, - "number_of_characters": 161816257, - "min_length_characters": 136, - "max_length_characters": 1879004 -} \ No newline at end of file diff --git a/data/adl/images/dist_document_length.png b/data/adl/images/dist_document_length.png deleted file mode 100644 index 390b0e919f03bb556c6ba4108af1d1ebb30681e0..0000000000000000000000000000000000000000 --- a/data/adl/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c720774f1c72e77402153edfa8f3390872bae88638dc3bfe9f2551815994f8eb -size 539253 diff --git a/data/ai-aktindsigt/ai-aktindsigt.md b/data/ai-aktindsigt/ai-aktindsigt.md deleted file mode 100644 index f3840d68577322aebe03991f78618e6abda35200..0000000000000000000000000000000000000000 --- a/data/ai-aktindsigt/ai-aktindsigt.md +++ /dev/null @@ -1,85 +0,0 @@ ---- -pretty_name: AI Aktindsigt -language: -- da -license: apache-2.0 -license_name: Apache 2.0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Web -source_datasets: -- AI-aktindsigt/Skrabet_kommunale_hjemmesider ---- - -# Dataset Card for AI Aktindsigt - - -Multiple web scrapes from municipality websites collected as a part of the [AI-aktindsigt](https://ai-aktindsigt.dk) project. - - -The dataset consists of multiple scrapes of municipal websites compiled in connection with the work on the [AI-aktindsigt](https://ai-aktindsigt.dk) project. The scrape is made across different domains from several different municipalities. - -## Dataset Description - - - -- **Number of samples**: 200.91K -- **Number of tokens (Llama 3)**: 139.23M -- **Average document length in tokens (min, max)**: 693.0064405666105 (9, 152.60K) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "ai-aktindsigt_0", - "text": "Vallensbæk Stationstorv 100 2665 Vallensbæk Strand Telefon: +45 4797 4000", - "source": "ai-aktindsigt", - "added": "2025-03-24", - "created": "2010-01-01, 2024-03-18", - "token_count": 29 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - - -### Dataset Statistics - - -
- - - - -## Additional Information - - - -### Sourced data -This dataset is derived from [`AI-aktindsigt/Skrabet_kommunale_hjemmesider`](https://huggingface.co/datasets/AI-aktindsigt/Skrabet_kommunale_hjemmesider/tree/main -) - -### Citation Information - -No citation is applicable for this work. We recommend citing the huggingface repository. diff --git a/data/ai-aktindsigt/ai-aktindsigt.parquet b/data/ai-aktindsigt/ai-aktindsigt.parquet deleted file mode 100644 index bf88f1bf607aa81efeeafb2bc558c7538e91f0d0..0000000000000000000000000000000000000000 --- a/data/ai-aktindsigt/ai-aktindsigt.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2554e61d9924e4f1b6a5df33a1bdf33cef91137a69ae9f75bd79755d2815c52e -size 214499633 diff --git a/data/ai-aktindsigt/create.py b/data/ai-aktindsigt/create.py deleted file mode 100644 index e8f352636aacfb5b073fa5562577ab719429971d..0000000000000000000000000000000000000000 --- a/data/ai-aktindsigt/create.py +++ /dev/null @@ -1,64 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0", -# ] -# /// -""" -This script is used to create the data for the AI-aktindsigt project. - -This derived the data from a .json.gz file. -""" - -from pathlib import Path -from typing import cast - -from datasets import Dataset, load_dataset - -source = "ai-aktindsigt" - - -def convert_sample(example): - # {'text': 'Vallensbæk Stationstorv 100 2665 Vallensbæk Strand Telefon: +45 4797 4000', - # 'id': '0_03fe7662f6d37df0ffbf5013907414f935350db9931043891a95ed830965a507a7bcb4df93741429bdfa4958cf25f6c273aa73146f2be80948f767eb5fa04645', - # 'source': 'AI-aktindsigt', - # 'added': '2024-04-16T12:35:52.000Z', - # 'metadata': {'url': 'https://vallensbaek.dk/', 'kommune': 'vallensbaek', 'sentence': 1, - # 'ppl_score': [634.6341], - # 'sha512': '03fe7662f6d37df0ffbf5013907414f935350db9931043891a95ed830965a507a7bcb4df93741429bdfa4958cf25f6c273aa73146f2be80948f767eb5fa04645'} - # } - - new_example = dict( - text_new=example["text"], - source=source, - domain="Web", - license="Apache-2.0", - added="2025-03-24", - created="2010-01-01, 2024-03-18", # Start date is approximate guess end date is the date of the last update - metadata={"source-pretty": "AI Aktindsigt"}, - ) - - return new_example - - -def main(): - data_path = Path( - "/work/dfm-data/pre-training/ai_aktindsigt/documents/ai_aktindsigt.jsonl.gz" - ) - ds = load_dataset("json", data_files=data_path.as_posix(), split="train") - - ds = cast(Dataset, ds) - - ds = ds.map(convert_sample, remove_columns=ds.column_names) - ds = ds.rename_columns({"text_new": "text"}) - ds = ds.add_column("id", [f"{source}_{i}" for i in range(len(ds))]) # type: ignore - ds = ds.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - save_path = Path(__file__).parent / f"{source}.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/ai-aktindsigt/descriptive_stats.json b/data/ai-aktindsigt/descriptive_stats.json deleted file mode 100644 index 460c5808fc703945adc683666f80c7c829bd3253..0000000000000000000000000000000000000000 --- a/data/ai-aktindsigt/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 200914, - "number_of_tokens": 139234696, - "min_length_tokens": 9, - "max_length_tokens": 152599, - "number_of_characters": 408005923, - "min_length_characters": 29, - "max_length_characters": 406832 -} \ No newline at end of file diff --git 
a/data/ai-aktindsigt/images/dist_document_length.png b/data/ai-aktindsigt/images/dist_document_length.png deleted file mode 100644 index 6eb71c17aa4d58174071ee0d50411b7dc0227846..0000000000000000000000000000000000000000 --- a/data/ai-aktindsigt/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:32d7c50d2b47fd31198d4fd28ead503c423562c8a4cdc317c45271785b3a6393 -size 562318 diff --git a/data/botxt/botxt.md b/data/botxt/botxt.md index e4f70b4d8e8eed1fe0bac0705ed42de1e13dea8c..2428d8ad0a694ea84f6ba6b201d26316c2d0ba1a 100644 --- a/data/botxt/botxt.md +++ b/data/botxt/botxt.md @@ -1,94 +1,57 @@ --- -pretty_name: Bornholmsk +pretty_name: Bornholmsk (Danish dialect) language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -domains: -- Dialect -- Web -source_datasets: -- danish-foundation-models/danish-gigaword + - language-modeling --- - -# Dataset Card for Bornholmsk - +# Dataset Card for Bornholmsk (Danish dialect) ## Dataset Description - - -The Bornholmsk Ordbog Dictionary Project - - -Fictional texts of various kinds written in Bornholmsk, the dialect spoken on the Danish island of Bornholm (The language code for Bornholmsk under IETF BCP-47 is da-bornholm), have been digitized (OCR’ed and proofread) by volunteers working within the recently resumed Bornholmsk Ordbog dictionary project (Kjeldsen, 2019). Most of the material included is written by Otto J. Lund in the period 1930-48 (novels, short stories, and poems). The Bornholmsk subcorpus, which in its present state amounts to circa 400 K words, also includes folk stories published by J. P. Kuhre in 1938, and by K. M. Kofoed in 1935, fictional letters by various authors published in the 1930s, as well as poems by Alfred Jensen published in 1948 and various other texts from the same period. The non-standardized orthography varies considerably from source to source. The Bornholmsk part of the Danish Gigaword is a significantly extended dataset, well beyond that studied in earlier NLP work on the dialect [(Derczynski and Kjeldsen, 2019)](https://aclanthology.org/W19-6138/). - - - -- **Number of samples**: 106 -- **Number of tokens (Llama 3)**: 847.97K -- **Average document length in tokens (min, max)**: 8.00K (407, 83.79K) - - - - -## Dataset Structure +- **Number of records:** 106 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "botxt_0000040", - "text": "Ræua-Lârs\n\nRæua-Lârs å hans Konna, Stina, bode uda i Torpabakkana. Hanj hed nok æjla Lârs\nNielsen, m[...]", - "source": "botxt", - "added": "2024-05-16", - "created": "2000-01-01, 2022-01-01", - "token_count": 7229 + 'text': 'Ræua-Lârs + +Ræua-Lârs å hans Konna, Stina, bode uda', + 'source': 'botxt', + 'id': 'botxt_0000040', + 'added': '2024-05-16', + 'created': '2000-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Other', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'Bornholmsk (Danish dialect)' + } } ``` -### Data Fields +## Data Fields -An entry in the dataset consists of the following fields: +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp when ai2 acquired this data.
+- **created**": timestamp when original document was created (best-guess if not available) +- **metadata**: source-specific metadata. -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +## License Information +
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code -### Dataset Statistics - - -

- +CC0 1.0 Universal

- - - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` +
diff --git a/data/botxt/botxt.parquet b/data/botxt/botxt.parquet index 7645a6e938e9484624d00b8b8db45868dd05a2e0..b9cc1d74ff5c3e84d9b0bf6a507b6672c8389ac0 100644 --- a/data/botxt/botxt.parquet +++ b/data/botxt/botxt.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9948ed3d6cfd26c57086eacee83097f7abb8f8b95ae1639b5e17b1025ebdfb5e -size 1343525 +oid sha256:b42642896dfda21b23bb8e8ef5ba65f878ebfa5fec2f6d57aec1e06778c75bbf +size 1353171 diff --git a/data/botxt/descriptive_stats.json b/data/botxt/descriptive_stats.json deleted file mode 100644 index 3251d1847651978a20eaa852763dedef586b7294..0000000000000000000000000000000000000000 --- a/data/botxt/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 106, - "number_of_tokens": 847973, - "min_length_tokens": 407, - "max_length_tokens": 83792, - "number_of_characters": 2011076, - "min_length_characters": 845, - "max_length_characters": 202015 -} \ No newline at end of file diff --git a/data/botxt/images/dist_document_length.png b/data/botxt/images/dist_document_length.png deleted file mode 100644 index 948a0f98666675f6f2bacf44d8177f521964367d..0000000000000000000000000000000000000000 --- a/data/botxt/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:92930c918e4b6bfbc3a5a1173e3af056d2f93c7d8c0a5cb02ee8604fbea14c41 -size 541369 diff --git a/data/cellar/cellar.md b/data/cellar/cellar.md deleted file mode 100644 index 5fc0349f1b141848c36fed01cff4404527431287..0000000000000000000000000000000000000000 --- a/data/cellar/cellar.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -pretty_name: Cellar -language: -- da -license: cc-by-sa-4.0 -license_name: CC-BY-SA 4.0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Legal ---- - -# Dataset Card for Cellar - - -The official digital repository for European Union legal documents and open data. - - -The EU Dataset Cellar serves as the central access point for all official EU publications, legislation, and open data resources. Maintained by the Publications Office of the European Union, this comprehensive digital archive contains millions of documents in multiple languages, including regulations, directives, decisions, treaties, case law, and preparatory acts dating back decades. The repository employs standardized metadata and unique identifiers to organize its vast collection, making it an essential resource for researchers, legal professionals, policymakers, and citizens seeking authoritative information on EU law and policy. The Cellar's linked data architecture also enables sophisticated search capabilities and integration with other information systems across the European Union's digital landscape. - - -## Dataset Description - - -- **Number of samples**: 63.40K -- **Number of tokens (Llama 3)**: 1.15B -- **Average document length in tokens (min, max)**: 18.17K (7, 2.60M) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "cellar_0", - "text": "\n\n\n\n© Европейски съюз, 2017 г.\n\nВъзпроизвеждането е разрешено при позоваване на оригинала.\n\n© Unión [...]", - "source": "cellar", - "added": "2025-03-25", - "created": "2024-01-01, 2026-01-01", - "token_count": 87018 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. 
-- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - - -### Dataset Statistics - - -

- -

- - - - -## Additional Information - -### Citation Information - -No citation is applicable for this work. diff --git a/data/cellar/cellar.parquet b/data/cellar/cellar.parquet deleted file mode 100644 index 665990a77ca0ab19c2f937265d6d2f52f39e6c4b..0000000000000000000000000000000000000000 --- a/data/cellar/cellar.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6162a90362e286ebc66a8344f39c3fbc835dec85f3e1d51318b7b39181ef4709 -size 1426079196 diff --git a/data/cellar/create.py b/data/cellar/create.py deleted file mode 100644 index ecdd223b3aaacfce19255a6ed3a3f7a24b91e3c5..0000000000000000000000000000000000000000 --- a/data/cellar/create.py +++ /dev/null @@ -1,60 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0", -# ] -# /// - -from pathlib import Path -from typing import cast -from datasets import Dataset, load_dataset, concatenate_datasets - -source = "cellar" - - -def convert_sample(example): - new_example = dict( - text_new=example["text"], - source=source, - domain="Legal", - license="cc-by-sa-4.0", - added="2025-03-25", - created="2024-01-01, 2026-01-01", # Scrape happened within these years - data likely written earlier - metadata={"source-pretty": "Cellar"}, - ) - - return new_example - - -def main(): - data_path = Path("/work/dfm-data/pre-training/cellar/documents") - data_paths = [p.as_posix() for p in data_path.glob("DAN*.jsonl.gz")] - dfs = [] - for i, path in enumerate(data_paths): - print(i, path.split("/")[-1]) - try: - ds = load_dataset( - "json", data_files=path, split="train" - ) # a few datasets fail to load - dfs.append(ds) - print("\tSuccess") - except Exception: - print("\tFail") - - ds = concatenate_datasets(dsets=dfs) - - ds = cast(Dataset, ds) - - ds = ds.map(convert_sample, remove_columns=ds.column_names) - ds = ds.rename_columns({"text_new": "text"}) - ds = ds.add_column("id", [f"{source}_{i}" for i in range(len(ds))]) # type: ignore - ds = ds.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - save_path = Path(__file__).parent / f"{source}.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/cellar/descriptive_stats.json b/data/cellar/descriptive_stats.json deleted file mode 100644 index 8719215bce173193ef04e2a87501f80f94d6e21a..0000000000000000000000000000000000000000 --- a/data/cellar/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 63399, - "number_of_tokens": 1152074881, - "min_length_tokens": 7, - "max_length_tokens": 2599840, - "number_of_characters": 3866568270, - "min_length_characters": 14, - "max_length_characters": 37287484 -} \ No newline at end of file diff --git a/data/cellar/images/dist_document_length.png b/data/cellar/images/dist_document_length.png deleted file mode 100644 index 1b03489a0a4acc653454927d3fe8ff228fd89368..0000000000000000000000000000000000000000 --- a/data/cellar/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c47baf8bd18b1c625e4c5f5b58daa6b7004d25ce54b943a9fefc011260566c93 -size 573507 diff --git a/data/dannet/dannet.md b/data/dannet/dannet.md index 550783d7b5bc13e3fde5f1c6621d842e6521fde5..49d8ebd534362a0db8f3f5368b3a98e7e5d26148 100644 --- a/data/dannet/dannet.md +++ b/data/dannet/dannet.md @@ -1,81 +1,84 @@ --- -pretty_name: DanNet +pretty_name: DanNet (Danish WordNet) language: -- da -license: other -license_name: DanNet 1.0 + - da +license: DanNet 
1.0 License +license_name: DanNet 1.0 License size_categories: -- 10k-100k + - 10k-100k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Other + - language-modeling --- - -# Dataset Card for DanNet - - -[DanNet](https://cst.ku.dk/projekter/dannet) is a Danish WordNet. - - - -A WordNet is a lexico-semantic network which show the meaning and the relation between words through named connections. It can be considered a machine-readable dictionary. - - +# Dataset Card for DanNet (Danish WordNet) ## Dataset Description - - - -- **Number of samples**: 47.60K -- **Number of tokens (Llama 3)**: 1.48M -- **Average document length in tokens (min, max)**: 31.079364745919374 (2, 106) - - - - -## Dataset Structure +- **Number of records:** 49040 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. +```yaml +{ + 'text': 'Når fodboldholdet fra 1. division i Ikast spiller ', + 'source': 'dannet', + 'id': 'dannet_46506', + 'added': '2020-09-24', + 'created': '2000-01-01, 2022-01-01', + 'metadata': { + 'domain': 'dannet', + 'license': 'Commercial Use of DanNet +DanNet may be used in commercial applications in accordance with the following +license agreement. An attorney representing the commercial interest should +review this DanNet license with respect to the intended use. - -```py -{ - "id": "dannet_46506", - "text": "Når fodboldholdet fra 1. division i Ikast spiller hjemmekampe, lyder råbet ud over Ikast Stadion: We[...]", - "source": "dannet", - "added": "2020-09-24", - "created": "2000-01-01, 2022-01-01", - "token_count": 50 -} -``` +DanNet 1.0 License -### Data Fields +DanNet Release 2.1 -An entry in the dataset consists of the following fields: +This software and database is being provided to you, the LICENSEE, by University +of Copenhagen and Society for Danish Language and Literature under the following +license. By obtaining, using and/or copying this software and database, you +agree that you have read, understood, and will comply with these terms and +conditions. -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +Permission to use, copy, modify and distribute this software and database and +its documentation for any purpose and without fee or royalty is hereby granted, +provided that you agree to comply with the following copyright notice and +statements, including the disclaimer, and that the same appear on ALL copies of +the software, database and documentation, including modifications that you make +for internal use or for distribution. +THIS SOFTWARE AND DATABASE IS PROVIDED "AS IS" AND UNIVERSITY OF COPENHAGEN and +SOCIETY FOR DANISH LANGUAGE AND LITERATURE MAKE NO +REPRESENTATIONS OR +WARRANTIES, EXPRESS OR IMPLIED.
BY WAY OF EXAMPLE, BUT NOT LIMITATION, +UNIVERSITY OF COPENHAGEN AND SOCIETY FOR DANISH LANGUAGE AND LITERATURE MAKE NO +REPRESENTATIONS OR WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR +PURPOSE OR THAT THE USE OF THE LICENSED SOFTWARE, DATABASE OR DOCUMENTATION WILL +NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. -### Dataset Statistics +The names of University of Copenhagen and Society for Danish Language and +Literature may not be used in advertising or publicity pertaining to +distribution of the software and/or database. Title to copyright in this +software, database and any associated documentation shall at all times remain +with University of Copenhagen and Society for Danish Language and Literature and +LICENSEE agrees to preserve same. - -

- -

- +DanNet 2.1 Copyright 2009-12 by University of Copenhagen and Society for Danish', + 'source-pretty': 'DanNet (Danish WordNet)' + } +} +``` +## Data Fields +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp when ai2 acquired this data. +- **created**: timestamp when original document was created (best-guess if not available) +- **metadata**: source-specific metadata. ## License Information
@@ -122,32 +125,3 @@ LICENSEE agrees to preserve same. DanNet 2.1 Copyright 2009-12 by University of Copenhagen and Society for Danish

- - - -## Additional Information - - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` diff --git a/data/dannet/dannet.parquet b/data/dannet/dannet.parquet index 382be3eb13fce06b1a48d877c7fba09ba5281e93..9ce233c620726906889032c71864424d333725d2 100644 --- a/data/dannet/dannet.parquet +++ b/data/dannet/dannet.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2ce98e55703f16406d9b3591297c7b860fa770c9ae55c4795bb7a50921619e43 -size 3918876 +oid sha256:905c2441a4c242e24d370775e9e035df3c67a7a1d797a615297cb6a1bbf51a96 +size 4743422 diff --git a/data/dannet/descriptive_stats.json b/data/dannet/descriptive_stats.json deleted file mode 100644 index fbeab34622d9be9e8ccf72659c609ae883ab3097..0000000000000000000000000000000000000000 --- a/data/dannet/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 47603, - "number_of_tokens": 1479471, - "min_length_tokens": 2, - "max_length_tokens": 106, - "number_of_characters": 4326120, - "min_length_characters": 2, - "max_length_characters": 340 -} \ No newline at end of file diff --git a/data/dannet/images/dist_document_length.png b/data/dannet/images/dist_document_length.png deleted file mode 100644 index 361285658d84a666301efc8a54cce224c5a631ee..0000000000000000000000000000000000000000 --- a/data/dannet/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e41fb3761b6eeee9baea4ebd2c24d548dfbf9b8a9a445677f67f2596b0de2330 -size 553168 diff --git a/data/danske-taler/create.py b/data/danske-taler/create.py deleted file mode 100644 index 1ce38b16972c000eb046f195db277565712d2700..0000000000000000000000000000000000000000 --- a/data/danske-taler/create.py +++ /dev/null @@ -1,314 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "beautifulsoup4==4.13.3", -# "datasets>=3.0.0", -# "transformers", -# "dynaword" -# ] -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "00e7f2aee7f7ad2da423419f77ecbb9c0536de0d" } -# /// -""" -Danske Taler API Downloader -This script downloads speeches/articles from the Danske Taler API: https://www.dansketaler.dk/api/v1 - -It saves it into the following structure: - -``` -{ - "text": "Lav et referat af nedenstående tekst:\n\nTekst:\nOpdatering: Manden er nu fundet af Nordjyllands Politi[...]", - "source": "nordjyllandnews", - "id": "nordjyllandnews_0", - "added": "2024-12-16", - "created": "2000-01-01, 2024-01-01", - "license": "Creative Commons Legal Code\n\nCC0 1.0 Universal", - "domain": "News", - "metadata": { - 
"source-pretty": "Nordjylland News" - } -} -``` - -Note: To run this script, you need to set `GIT_LFS_SKIP_SMUDGE=1` to be able to install dynaword: - -```bash -GIT_LFS_SKIP_SMUDGE=1 uv run data/memo/create.py -``` - -This second version fixed previous issues with the download and processing of the Danish Memo repository: -https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/67 -""" - -import logging -import time -from datetime import date -from pathlib import Path -from typing import Any - -from datasets import Dataset -import pandas as pd -import requests -from bs4 import BeautifulSoup, NavigableString -from tqdm import tqdm - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) - -# Configuration -API_BASE_URL = "https://www.dansketaler.dk/api/v1" - -KNOWN_HTML_TAGS = { - "html", - "head", - "body", - "title", - "meta", - "link", - "script", - "style", - "div", - "span", - "p", - "a", - "ul", - "ol", - "li", - "table", - "tr", - "td", - "th", - "img", - "h1", - "h2", - "h3", - "h4", - "h5", - "h6", - "strong", - "em", - "br", - "hr", - "form", - "input", - "button", - "label", - "select", - "option", - "textarea", - "iframe", - "nav", - "footer", - "header", - "main", - "section", - "article", -} - - -def contains_html_tags(text): - soup = BeautifulSoup(str(text), "html.parser") - return any(tag.name in KNOWN_HTML_TAGS for tag in soup.find_all()) - - -def get_all_speeches() -> list[dict[str, Any]]: - # fetch first page, notably the total number of pages - url = f"{API_BASE_URL}/speeches?per_page=50" - response = requests.get(url) - response.raise_for_status() - speeches = response.json() - meta = speeches["meta"] - total_pages = meta["total_pages"] - - # fetch all pages - all_speeches = [] - for page in range(1, total_pages + 1): - url = f"{API_BASE_URL}/speeches?per_page=50&page={page}" - response = requests.get(url) - response.raise_for_status() - speeches = response.json() - all_speeches.extend(speeches["speeches"]) - - return all_speeches - - -def fetch_speech_content( - url: str, max_retries: int = 3, backoff_factor: float = 0.5 -) -> tuple[str | None, str]: - """ - Fetches the license div from the page with retry logic. - - Args: - url: The URL to fetch the license div from - max_retries: Maximum number of retry attempts - backoff_factor: Factor to determine exponential backoff time between retries - - Returns: - The text content of the license div if found, None otherwise - """ - retries = 0 - - while retries <= max_retries: - try: - response = requests.get(url, timeout=10) - response.raise_for_status() - - soup = BeautifulSoup(response.text, "html.parser") - license_div = soup.find("div", class_="speech-copyright") - speech_div = soup.find("div", class_="speech-article-content") - speech = "" - if speech_div: - # Iterate over the children of the found div - for child_div in speech_div.children: # type: ignore - if child_div.name == "div": # type: ignore - current_paragraph = [] - for content in child_div.contents: # type: ignore - if isinstance(content, NavigableString): - # Append text content - current_paragraph.append(str(content).strip()) - elif content.name == "br": - # If a
is encountered, join and print the current paragraph, then reset - if current_paragraph: - speech += "".join(current_paragraph) - speech += "\n" # Add a newline for paragraph break - current_paragraph = [] - # Print any remaining text in the current_paragraph list - if current_paragraph: - speech += "".join(current_paragraph) - speech += "\n" # Add a newline for paragraph break - - return (license_div.text if license_div else None, speech) - - except (requests.RequestException, AttributeError) as e: - retries += 1 - - if retries > max_retries: - logger.info( - f"Failed to fetch license after {max_retries} attempts: {str(e)}" - ) - return (None, "") - - # Calculate backoff time using exponential backoff - wait_time = backoff_factor * (2 ** (retries - 1)) - logger.info( - f"Attempt {retries} failed. Retrying in {wait_time:.2f} seconds..." - ) - time.sleep(wait_time) - - return (None, "") - - -def convert_to_license(license_information: str | None) -> str | None: - """checks if "Materialet er fri af ophavsret" is in the page""" - - if license_information and ( - ("Materialet er fri af ophavsret" in license_information) - or ("Materialet er fri af ophvasret" in license_information) - or ("Ophavsretten er bortfaldet" in license_information) - or ("Manuskriptet er fri af ophavsret" in license_information) - or ("Offentlig " == license_information) - ): - return "cc0" - - return license_information - - -def convert_to_row(speech_meta: dict[str, Any]) -> dict[str, Any]: - speech_id = speech_meta["id"] - - date_of_speech = speech_meta["date"]["iso_date"] - date_of_speech_start = f"{date_of_speech}" - date_of_speech_end = f"{date_of_speech}" - - (license_information, speech) = fetch_speech_content(speech_meta["url"]) - - row = { - "id": f"danske-taler_{speech_id}", - "text": speech, - "source": "danske-taler", - # current date - "added": date.today().isoformat(), - "created": f"{date_of_speech_start}, {date_of_speech_end}", - "license_information": license_information, - "domain": "Spoken", - "metadata": {"source-pretty": "Danske Taler"}, - } - - return row - - -def download_speeches() -> pd.DataFrame: - logger.info("Fetching all speeches from Danske Taler API") - speeches = get_all_speeches() - logger.info(f"Found {len(speeches)} speeches") - - rows = [] - for speech in tqdm(speeches): - row = convert_to_row(speech) - rows.append(row) - - logger.info(f"Saving {len(rows)} speeches to dataset") - df = pd.DataFrame(rows) - return df - - -def main(): - save_path = Path(__file__).parent / "danske-taler.parquet" - save_path_all = Path(__file__).parent / "tmp" / "danske-taler-all.parquet" - save_path_all.parent.mkdir(parents=False, exist_ok=True) - - if save_path_all.exists(): - logger.info(f"Loading dataset from {save_path_all}") - df = pd.read_parquet(save_path_all) - else: - logger.info(f"Downloading speeches and saving to {save_path_all}") - df = download_speeches() - df.to_parquet(save_path_all) - - licenses = [convert_to_license(license) for license in df["license_information"]] - df["license"] = licenses - - uniques_licenses = set(df["license"].tolist()) - logger.info("Unique licenses:") - for license in uniques_licenses: - logger.info(f"\t{license}") - - # remove documents without a cc0 license - len_df = len(df) - df = df[df["license"] == "cc0"] - logger.info(f"Removed {len_df - len(df)} documents without a cc0 license") - - dataset = Dataset.from_pandas(df, preserve_index=False) - - dataset = remove_empty_texts(dataset) # remove rows with empty text - dataset = remove_duplicate_text(dataset) # 
remove rows with duplicate text - dataset = add_token_count(dataset) - dataset = ensure_column_order(dataset) - - assert len(set(dataset["id"])) == len(dataset), "IDs are not unique" - assert len(set(dataset["text"])) == len(dataset), "Texts are not unique" - assert len(set(df["license"])) == 1, "Multiple licenses found" - - # check for html tags in text - assert not df["text"].apply(contains_html_tags).any(), "HTML tags found in text" - - dataset.to_parquet(save_path) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / "danske-taler.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/danske-taler/danske-taler.log b/data/danske-taler/danske-taler.log deleted file mode 100644 index 4b21a33634ea587550fbeaea1410de90389fc055..0000000000000000000000000000000000000000 --- a/data/danske-taler/danske-taler.log +++ /dev/null @@ -1,167 +0,0 @@ -2025-03-29 14:14:08,846 - INFO - Downloading speeches and saving to /work/githubs/tmp/danish-dynaword/data/danske-taler/tmp/danske-taler-all.parquet -2025-03-29 14:14:08,847 - INFO - Fetching all speeches from Danske Taler API -2025-03-29 14:15:19,326 - INFO - Found 4725 speeches - 13%|██████████▏ | 597/4725 [01:22<11:15, 6.11it/s]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/niels-hoejlund-pedersens-translokationstale-2020 - 17%|██████████████ | 818/4725 [01:57<09:00, 7.23it/s]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/katrine-lykke-pedersens-tale-til-unge-om-haab-i-en-coronatid - 17%|█████████████▋ | 820/4725 [02:01<1:05:16, 1.00s/it]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/anastacia-halkens-tale-til-unge-om-haab-i-en-coronatid - 18%|██████████████▏ | 828/4725 [02:07<17:53, 3.63it/s]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/thomas-vinterbergs-tale-ved-modtagelsen-af-oscar-prisen - 22%|█████████████████▋ | 1042/4725 [02:41<10:04, 6.09it/s]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/pernille-vermunds-tale-ved-folketingets-aabningsdebat-2021 - 22%|█████████████████▉ | 1059/4725 [02:48<08:22, 7.30it/s]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... 
-Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/pernille-vermunds-tale-ved-nye-borgerliges-aarsmoede-2021 - 22%|█████████████████▌ | 1061/4725 [02:52<1:01:08, 1.00s/it]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/mette-thiesens-tale-ved-nye-borgerliges-aarsmoede-2021 - 22%|█████████████████▌ | 1062/4725 [02:57<2:00:22, 1.97s/it]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/peter-seier-christensens-tale-ved-nye-borgerliges-aarsmoede-2021 - 34%|███████████████████████████▍ | 1617/4725 [04:25<07:09, 7.24it/s]Attempt 1 failed. Retrying in 0.50 seconds... -Attempt 2 failed. Retrying in 1.00 seconds... -Attempt 3 failed. Retrying in 2.00 seconds... -Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/silke-ena-svares-tale-ved-demonstrationen-for-born-og-unge -100%|████████████████████████████████████████████████████████████████████████████████| 4725/4725 [12:43<00:00, 6.19it/s] -2025-03-29 14:28:02,454 - INFO - Saving 4725 speeches to dataset -2025-03-29 14:28:03,330 - INFO - Unique licenses: -2025-03-29 14:28:03,331 - INFO - None -2025-03-29 14:28:03,331 - INFO - Materialet er beskyttet af ophavsret -2025-03-29 14:28:03,331 - INFO - cc0 -2025-03-29 14:28:03,331 - INFO - Materialet er beskyttet af ophavsret, da talen ikke er holdt i offentligheden. -2025-03-29 14:28:03,331 - INFO - Materialet er omfattet af ophavsret -2025-03-29 14:28:03,331 - INFO - Manuskript taget fra ft.dk. med tilladelse fra udgiver. -2025-03-29 14:28:03,331 - INFO - Materialet et beskyttet af ophavsret -2025-03-29 14:28:03,331 - INFO - Manuskript taget fra ft.dk med tilladelse fra udgiver. -2025-03-29 14:28:03,331 - INFO - Materialet er beskyttet af ophavsret -2025-03-29 14:28:03,331 - INFO - Materialet er beskyttet af ophavsret -2025-03-29 14:28:03,461 - INFO - Removed 2063 documents without a cc0 license -2025-03-29 14:28:03,541 - INFO - Removed 0 duplicate ids -2025-03-29 14:28:03,549 - INFO - Removed 2 rows with empty text -2025-03-29 14:28:03,631 - INFO - Removed 2 rows with duplicate text -Creating parquet from Arrow format: 100%|██████████████████████████████████████████████████| 3/3 [00:00<00:00, 11.33ba/s] -2025-06-24 13:03:05,424 - INFO - Found 5103 speeches -2025-06-24 13:04:19,375 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:04:29,734 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:04:30,613 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:04:31,856 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 13:04:34,098 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/niels-hoejlund-pedersens-translokationstale-2020 -2025-06-24 13:05:10,223 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:05:11,113 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:05:12,575 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... 
-2025-06-24 13:05:14,814 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/katrine-lykke-pedersens-tale-til-unge-om-haab-i-en-coronatid -2025-06-24 13:05:15,208 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:05:15,922 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:05:17,117 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 13:05:19,583 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/anastacia-halkens-tale-til-unge-om-haab-i-en-coronatid -2025-06-24 13:05:20,875 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:05:21,619 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:05:22,844 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 13:05:25,074 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/thomas-vinterbergs-tale-ved-modtagelsen-af-oscar-prisen -2025-06-24 13:06:01,599 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:06:02,313 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:06:03,588 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 13:06:05,817 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/pernille-vermunds-tale-ved-folketingets-aabningsdebat-2021 -2025-06-24 13:06:08,990 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:06:09,675 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:06:10,912 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 13:06:13,120 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/pernille-vermunds-tale-ved-nye-borgerliges-aarsmoede-2021 -2025-06-24 13:06:13,512 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:06:14,230 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:06:15,462 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 13:06:17,720 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/mette-thiesens-tale-ved-nye-borgerliges-aarsmoede-2021 -2025-06-24 13:06:17,920 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:06:18,656 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:06:19,902 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 13:06:22,132 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/peter-seier-christensens-tale-ved-nye-borgerliges-aarsmoede-2021 -2025-06-24 13:07:56,628 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 13:07:57,353 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 13:07:58,586 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... 
-2025-06-24 13:08:00,850 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/silke-ena-svares-tale-ved-demonstrationen-for-born-og-unge -2025-06-24 13:19:38,142 - INFO - Saving 5103 speeches to dataset -2025-06-24 13:19:38,322 - INFO - Unique licenses: -2025-06-24 13:19:38,322 - INFO - None -2025-06-24 13:19:38,322 - INFO - cc0 -2025-06-24 13:19:38,322 - INFO - Manuskript taget fra ft.dk. med tilladelse fra udgiver. -2025-06-24 13:19:38,322 - INFO - Manuskript tilsendt af taler og udgivet af Danske Taler med tilladelse fra taler. -2025-06-24 13:19:38,322 - INFO - Materialet er beskyttet af ophavsret, da talen ikke er holdt i offentligheden. -2025-06-24 13:19:38,322 - INFO - Materialet er beskyttet af ophavsret -2025-06-24 13:19:38,322 - INFO - Materialet er beskyttet af ophavsret -2025-06-24 13:19:38,322 - INFO - Materialet et beskyttet af ophavsret -2025-06-24 13:19:38,322 - INFO - Manuskript taget fra ft.dk med tilladelse fra udgiver. -2025-06-24 13:19:38,322 - INFO - Materialet er beskyttet af ophavsret -2025-06-24 13:19:38,322 - INFO - Materialet er omfattet af ophavsret -2025-06-24 13:19:38,325 - INFO - Removed 2188 documents without a cc0 license -2025-06-24 13:19:38,326 - INFO - Removed 0 duplicate ids -2025-06-24 13:19:38,332 - INFO - Removed 1 rows with empty text -2025-06-24 13:19:38,345 - INFO - Removed 2 rows with duplicate text2025-06-24 14:44:36,089 - INFO - Downloading speeches and saving to /Users/kristianjensen/Documents/danish-dynaword/data/danske-taler/tmp/danske-taler-all.parquet -2025-06-24 14:44:36,089 - INFO - Fetching all speeches from Danske Taler API -2025-06-24 14:45:43,887 - INFO - Found 5107 speeches -2025-06-24 14:46:53,929 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:46:54,627 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:46:55,824 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:46:58,015 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/niels-hoejlund-pedersens-translokationstale-2020 -2025-06-24 14:47:34,505 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:47:35,215 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:47:36,514 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:47:38,725 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/katrine-lykke-pedersens-tale-til-unge-om-haab-i-en-coronatid -2025-06-24 14:47:39,093 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:47:39,798 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:47:41,013 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:47:43,253 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/anastacia-halkens-tale-til-unge-om-haab-i-en-coronatid -2025-06-24 14:47:44,528 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:47:45,272 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:47:46,492 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... 
-2025-06-24 14:47:48,691 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/thomas-vinterbergs-tale-ved-modtagelsen-af-oscar-prisen -2025-06-24 14:48:26,340 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:48:27,037 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:48:28,248 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:48:30,496 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/pernille-vermunds-tale-ved-folketingets-aabningsdebat-2021 -2025-06-24 14:48:33,382 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:48:34,125 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:48:35,339 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:48:37,570 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/pernille-vermunds-tale-ved-nye-borgerliges-aarsmoede-2021 -2025-06-24 14:48:37,940 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:48:38,663 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:48:39,884 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:48:42,101 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/mette-thiesens-tale-ved-nye-borgerliges-aarsmoede-2021 -2025-06-24 14:48:42,357 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:48:43,097 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:48:44,340 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:48:46,560 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/peter-seier-christensens-tale-ved-nye-borgerliges-aarsmoede-2021 -2025-06-24 14:50:22,691 - INFO - Attempt 1 failed. Retrying in 0.50 seconds... -2025-06-24 14:50:23,446 - INFO - Attempt 2 failed. Retrying in 1.00 seconds... -2025-06-24 14:50:24,662 - INFO - Attempt 3 failed. Retrying in 2.00 seconds... -2025-06-24 14:50:26,911 - INFO - Failed to fetch license after 3 attempts: 500 Server Error: Internal Server Error for url: https://www.dansketaler.dk/tale/silke-ena-svares-tale-ved-demonstrationen-for-born-og-unge -2025-06-24 15:02:20,338 - INFO - Saving 5107 speeches to dataset -2025-06-24 15:02:20,503 - INFO - Unique licenses: -2025-06-24 15:02:20,503 - INFO - None -2025-06-24 15:02:20,503 - INFO - cc0 -2025-06-24 15:02:20,503 - INFO - Materialet et beskyttet af ophavsret -2025-06-24 15:02:20,503 - INFO - Materialet er beskyttet af ophavsret -2025-06-24 15:02:20,503 - INFO - Materialet er omfattet af ophavsret -2025-06-24 15:02:20,503 - INFO - Manuskript taget fra ft.dk. med tilladelse fra udgiver. -2025-06-24 15:02:20,503 - INFO - Materialet er beskyttet af ophavsret -2025-06-24 15:02:20,503 - INFO - Manuskript taget fra ft.dk med tilladelse fra udgiver. -2025-06-24 15:02:20,503 - INFO - Materialet er beskyttet af ophavsret -2025-06-24 15:02:20,503 - INFO - Materialet er beskyttet af ophavsret, da talen ikke er holdt i offentligheden. -2025-06-24 15:02:20,503 - INFO - Manuskript tilsendt af taler og udgivet af Danske Taler med tilladelse fra taler. 
-2025-06-24 15:02:20,506 - INFO - Removed 2191 documents without a cc0 license -2025-06-24 15:02:20,508 - INFO - Removed 0 duplicate ids -2025-06-24 15:02:20,516 - INFO - Removed 2 rows with empty text -2025-06-24 15:02:20,529 - INFO - Removed 2 rows with duplicate text diff --git a/data/danske-taler/danske-taler.md b/data/danske-taler/danske-taler.md deleted file mode 100644 index 396d1eb015281489e2b8e25a5311eaa86575a9d8..0000000000000000000000000000000000000000 --- a/data/danske-taler/danske-taler.md +++ /dev/null @@ -1,135 +0,0 @@ ---- -pretty_name: Danske Taler -language: -- da -license: cc0-1.0 -license_name: CC-0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Conversation -- Speeches -- Spoken ---- - -# Dataset Card for Danske Taler - - -Danish Speeches from [dansketaler.dk](https://www.dansketaler.dk). - - - -The database dansketaler.dk is managed by Danske Taler, an independent institution that in addition to managing the database and carries out cultural -and democratic projects based on speeches. -Danske Taler state as their goals that they seek to preserve our cultural heritage and promotes active citizenship and democratic confidence through its work. -Additionally, Danske Taler provides data to a number of online resources, including: lex.dk, sprogteknologi.dk, and ordnet.dk. - -The goal of the dataset is to collect historical and timely speeches and make them available for the public. - -Learn more about danske taler by reading their [about us](https://www.dansketaler.dk/om-os) page. - -> NOTE: Danske-Taler is also collecting [sermons](https://www.dansketaler.dk/praedikener), but these are not included in this dataset. - -## Dataset Description - - - -- **Number of samples**: 2.91K -- **Number of tokens (Llama 3)**: 8.72M -- **Average document length in tokens (min, max)**: 3.00K (129, 53.40K) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "danske-taler_281", - "text": "Tyske landsmænd og -kvinder !\nSyv år er kort tid, en brøkdel af en enkel menneskelig normaltilværels[...]", - "source": "danske-taler", - "added": "2025-06-24", - "created": "1940-01-30, 1940-01-30", - "token_count": 3020 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - - -### Dataset Statistics - - -

- -

- - - - -## Additional Information - - -### Dataset Collection Process - -This dataset was collected using the publicly available [API](https://www.dansketaler.dk/api/v1). - -### Quality Assurance -We check for and remove exact duplicates, empty texts, duplicate ids after the initial download. We additionally check if the articles contain any HTML. - -## Opportunities for Improvement - -While this dataset can be updated to include the latest availabe speeches. - -We consider the quality of the current collection high with a low chance of -incorrect formatting, -spelling errors, -empty documents or -misformatted segments. -This stems both from the quality assurance, source of documents and subjective inspection. - -### License Information -Since the license information isn't avaiable through the API we collect this data directly from the webpage of each article under the header -"Ophavsret". - -For speeches where it is noted that *"Materialet er fri af ophavsret"* (The material is in the public domain) or similarly we assign it a `cc0` license. - -Such an example can be seen here: - -> **Ophavsret** -> -> Materialet er fri af ophavsret. Taler, som er holdt i offentligheden, er ikke omfattet af ophavsret (Jf. ophavsretslovens § 26 og 32). -> Det betyder, at når en tale er indgået i Danske Talers database, kan den bruges af tredjeparter, fx til undervisning eller forskning. -> -> *source: [Ursula von der Leyens tale om europæisk forsvar og sikkerhed på Hærens Officersskole](https://www.dansketaler.dk/tale/tale-om-europaeisk-forsvar-og-sikkerhed-pa-haerens-officersskole)* - -Speeches without this mention is removed. Such an example include: - -> **Ophavsret** -> -> Materialet er beskyttet af ophavsret -> -> *Source: [Christina Egelunds tale ved Aarhus Universitets årsfest](https://www.dansketaler.dk/tale/christina-egelunds-tale-ved-aarhus-universitets-arsfest)* - -We manually checked the unique set of license descriptions to see if any were open licenses that weren't included in the current criteria. - -For specific filtering criteria see the `create.py` script. - -### Citation Information - -No citation is applicable for this work. We recommend citing the huggingface repository. 
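The license filter described above comes down to a short allow-list of public-domain phrasings under the "Ophavsret" header. A minimal sketch of that check, condensed from `convert_to_license` in the `data/danske-taler/create.py` script shown earlier in this diff; the pandas wiring around it is illustrative only, not part of the original script:

```py
# Condensed sketch of the cc0 filter; the marker phrases are the ones the
# original create.py accepts verbatim (including one misspelled variant that
# appears on the site).
import pandas as pd

PUBLIC_DOMAIN_MARKERS = [
    "Materialet er fri af ophavsret",    # "The material is free of copyright"
    "Materialet er fri af ophvasret",    # misspelled variant found on the site
    "Ophavsretten er bortfaldet",        # "The copyright has lapsed"
    "Manuskriptet er fri af ophavsret",  # "The manuscript is free of copyright"
]


def convert_to_license(notice: str | None) -> str | None:
    """Map a raw 'Ophavsret' notice to 'cc0' when it signals public domain."""
    if notice and (
        any(marker in notice for marker in PUBLIC_DOMAIN_MARKERS)
        or notice == "Offentlig "  # exact match, trailing space as in the source
    ):
        return "cc0"
    return notice  # keep the notice so non-cc0 rows can be inspected, then dropped


df = pd.DataFrame({"license_information": ["Materialet er fri af ophavsret.", None]})
df["license"] = [convert_to_license(x) for x in df["license_information"]]
df = df[df["license"] == "cc0"]  # everything else is removed, as described above
```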
diff --git a/data/danske-taler/danske-taler.parquet b/data/danske-taler/danske-taler.parquet deleted file mode 100644 index 6708d695ecb62e6eaee6e8a9e26fd4c162a868d9..0000000000000000000000000000000000000000 --- a/data/danske-taler/danske-taler.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d007e606854f868febcf61a513302f7299ff35222fe9de487d17b9baaaedf248 -size 16089529 diff --git a/data/danske-taler/descriptive_stats.json b/data/danske-taler/descriptive_stats.json deleted file mode 100644 index 98b8d87aad535213c182f0fa039568f0625e1906..0000000000000000000000000000000000000000 --- a/data/danske-taler/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 2912, - "number_of_tokens": 8723951, - "min_length_tokens": 129, - "max_length_tokens": 53401, - "number_of_characters": 26616908, - "min_length_characters": 388, - "max_length_characters": 155429 -} \ No newline at end of file diff --git a/data/danske-taler/images/dist_document_length.png b/data/danske-taler/images/dist_document_length.png deleted file mode 100644 index aa1d4f9bb645ba555fb9b5e5581ca885a7e63c66..0000000000000000000000000000000000000000 --- a/data/danske-taler/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8a6cc3946783f2d8e4725e50acc17b4ffbc84c38bb521253a5c2dca9087aa34d -size 552936 diff --git a/data/depbank/depbank.md b/data/depbank/depbank.md index d3e43a33b3ca02954a0af4d958fada8f585ad932..c277ebbb4e18c0cab9362be4a5e870b393d30117 100644 --- a/data/depbank/depbank.md +++ b/data/depbank/depbank.md @@ -1,115 +1,51 @@ --- pretty_name: Danish Dependency Treebank language: -- da + - da license: cc-by-sa-4.0 -license_name: CC-BY-SA 4.0 +license_name: Creative Commons Attribution Share Alike 4.0 size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Other + - language-modeling --- - # Dataset Card for Danish Dependency Treebank - - -The Danish subsection of the [Universal Dependencies Treebank](https://github.com/UniversalDependencies/UD_Danish-DDT). - - - -The Danish UD treebank has been converted from the Danish Dependency Treebank (Buch-Kromman, 2003) into Universal Dependencies (UD). It consists of 5,512 sentences (100k words). The Danish source texts and the Danish part-of-speech tags were created by the PAROLE-DK project (Keson 1998) by the Danish Society for Language and Literature. - -While the dataset was initially intended as a rich annotation, this corpora only uses the raw text. - ## Dataset Description - - - -- **Number of samples**: 536 -- **Number of tokens (Llama 3)**: 185.45K -- **Average document length in tokens (min, max)**: 345.99626865671644 (261, 517) - - - - -## Dataset Structure +- **Number of records:** 536 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "depbank_0375", - "text": "\nH.L. Hansen var en usædvanmlig og frodig personlighed. Han skabte \nglæde og munterhed omkring sig o[...]", - "source": "depbank", - "added": "2024-05-16", - "created": "2000-01-01, 2022-01-01", - "token_count": 389 + 'text': 'H.L. 
Hansen var en usædvanmlig og frodig personlig', + 'source': 'depbank', + 'id': 'depbank_0375', + 'added': '2024-05-16', + 'created': '2000-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Other', + 'license': 'Attribution-ShareAlike 4.0 International', + 'source-pretty': 'Danish Dependency Treebank' + } } ``` -### Data Fields - -An entry in the dataset consists of the following fields: +## Data Fields -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp when ai2 acquired this data. +- **created**: timestamp when original document was created (best-guess if not available) +- **metadata**: source-specific metadata. - -### Dataset Statistics - -

- +## License Information +

+Creative Commons Attribution Share Alike 4.0 +

+Attribution-ShareAlike 4.0 International

- - - - -## Additional Information - - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` +
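Each subset keeps the same layout visible throughout this diff: a `<name>.md` datasheet next to a `<name>.parquet` shard under `data/<name>/`. A minimal sketch for inspecting one shard in isolation, assuming a local checkout of the repository; the expected row count is the one listed in the depbank datasheet above:

```py
# Minimal sketch: read a single subset shard directly from a local checkout.
from datasets import load_dataset

ds = load_dataset(
    "parquet",
    data_files="data/depbank/depbank.parquet",
    split="train",
)

print(len(ds))  # the depbank datasheet above lists 536 records
print(ds[0]["id"], ds[0]["source"])  # ids look like 'depbank_0375'
```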
diff --git a/data/depbank/depbank.parquet b/data/depbank/depbank.parquet index 21ff7a1ab374bbab7d0a839efd95b2cd9fba1f10..81d21fe18850262120c1ea9bcc1f8a40ac6fc5ef 100644 --- a/data/depbank/depbank.parquet +++ b/data/depbank/depbank.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:86febe315dae1089432da27d7b0c96a9a9bc0920d030563a35680416ac231e6f -size 392289 +oid sha256:863aac5735bee6995b665864ea355b488e35bb2cca696ea340d8febc653b8886 +size 394917 diff --git a/data/depbank/descriptive_stats.json b/data/depbank/descriptive_stats.json deleted file mode 100644 index 1c5b7460a60c8b66b3a93964a13377f915997214..0000000000000000000000000000000000000000 --- a/data/depbank/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 536, - "number_of_tokens": 185454, - "min_length_tokens": 261, - "max_length_tokens": 517, - "number_of_characters": 546130, - "min_length_characters": 773, - "max_length_characters": 1398 -} \ No newline at end of file diff --git a/data/depbank/images/dist_document_length.png b/data/depbank/images/dist_document_length.png deleted file mode 100644 index bcf39d87eb1bbf06064a23203cd99fb118b147ae..0000000000000000000000000000000000000000 --- a/data/depbank/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d61b39a37be40d593e91cca7127f8ee3c3a3a1dcbad52609ac61e4c7ae59a798 -size 539460 diff --git a/data/domsdatabasen/create.py b/data/domsdatabasen/create.py deleted file mode 100644 index cf925107f6634118676337bf83a5fe5671f5572d..0000000000000000000000000000000000000000 --- a/data/domsdatabasen/create.py +++ /dev/null @@ -1,344 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets", -# "dynaword", -# "marker-pdf", -# "requests", -# "torch", -# ] -# -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword" } -# /// - -""" -Script for downloading and processing the Domsdatabasen.dk site. - -Note: To run this script, you need to set `GIT_LFS_SKIP_SMUDGE=1` to be able to install dynaword: - -```bash -GIT_LFS_SKIP_SMUDGE=1 uv run data/domsdatabasen/create.py -``` - -Note: This script is designed to be run using a GPU. 
-""" - -import atexit -import logging -import os -import csv -import time -from typing import cast - -import torch - -import gc -import requests -import torch.multiprocessing as mp -from pathlib import Path -from datetime import date, datetime - -from datasets import Dataset, concatenate_datasets -from marker.converters.pdf import PdfConverter -from marker.models import create_model_dict -from marker.output import text_from_rendered - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) - -# ----------------- Config ------------------ - -PDF_DIR = Path(__file__).parent / "pdfs" -LOG_FILE = Path(__file__).parent / "progress_log.csv" -PARQUET_FILE = Path(__file__).parent / "domsdatabasen.parquet" -MAX_WORKERS = 10 -RETRY_COUNT = 3 -RETRY_DELAY = 2 - -# ----------------- Headers ------------------ - -HEADERS = { - "Accept": "application/json, text/plain, */*", - "Accept-Encoding": "gzip, deflate, br, zstd", - "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8", - "Connection": "keep-alive", - "Content-Type": "application/json", -} - - -def init_csv(): - if not LOG_FILE.exists(): - with open(LOG_FILE, "w", newline="", encoding="utf-8") as f: - writer = csv.DictWriter( - f, - fieldnames=["document_id", "pdf_downloaded", "text_extracted", "error"], - ) - writer.writeheader() - - -def append_log(document_id: str, pdf: bool, text: bool, error: str = ""): - with open(LOG_FILE, "a", newline="", encoding="utf-8") as f: - writer = csv.DictWriter( - f, fieldnames=["document_id", "pdf_downloaded", "text_extracted", "error"] - ) - writer.writerow( - { - "document_id": document_id, - "pdf_downloaded": int(pdf), - "text_extracted": int(text), - "error": error, - } - ) - - -def load_existing_ids() -> set: - if not PARQUET_FILE.exists(): - return set() - ds = Dataset.from_parquet(str(PARQUET_FILE)) - ds = cast(Dataset, ds) - return set(ds["id"]) - - -# ----------------- Retry Helpers ------------------ - - -def retry(func, *args, retries=RETRY_COUNT, delay=RETRY_DELAY, **kwargs): - for attempt in range(retries): - try: - return func(*args, **kwargs) - except Exception as e: - logger.warning(f"⚠️ Retry {attempt + 1}/{retries} failed: {e}") - time.sleep(delay) - raise RuntimeError(f"❌ All retries failed for {func.__name__}({args})") - - -# ----------------- PDF Download ------------------ - - -def download_pdf(document: dict) -> Path | None: - document_id = document["id"] - out_path = PDF_DIR / f"document_{document_id}.pdf" - if out_path.exists(): - logger.info(f"⏭️ Skipped PDF (exists): {document_id}") - return out_path - - url = f"https://domsdatabasen.dk/webapi/api/Case/document/download/{document_id}" - try: - response = retry(requests.get, url, headers=HEADERS) - if response.status_code == 200: - with open(out_path, "wb") as f: - f.write(response.content) - logger.info(f"✅ Downloaded PDF: {document_id}") - append_log(document_id, pdf=True, text=False) - return out_path - else: - raise RuntimeError(f"Download failed: {response.status_code}") - except Exception as e: - append_log(document_id, pdf=False, text=False, error=str(e)) - return None - - -# ----------------- Parallel Extract Text ------------------ - - -def worker_init(): - model_dict = create_model_dict() - - global model_refs - model_refs = model_dict - - # Ensure we clean up the model references on exit - atexit.register(worker_exit) - - -def worker_exit(): - global model_refs - try: - del model_refs - except Exception: - pass - - 
-def process_document(document: dict) -> dict | None: - # from marker.output import text_from_rendered - # from marker.converters.pdf import PdfConverter - - torch.set_num_threads(2) - - document_id = document["id"] - verdict_date = document.get("verdictDateTime") - pdf_path = PDF_DIR / f"document_{document_id}.pdf" - - if not pdf_path.exists(): - url = ( - f"https://domsdatabasen.dk/webapi/api/Case/document/download/{document_id}" - ) - try: - response = retry(requests.get, url, headers=HEADERS) - if response.status_code == 200: - with open(pdf_path, "wb") as f: - f.write(response.content) - logger.info(f"✅ Downloaded PDF: {document_id}") - else: - raise RuntimeError(f"Download failed: {response.status_code}") - except Exception as e: - append_log(document_id, pdf=False, text=False, error=str(e)) - return None - - config = {"pdftext_workers": 1, "extract_images": False, "disable_tqdm": True} - - try: - converter = PdfConverter(artifact_dict=model_refs, config=config) - rendered = retry(converter, str(pdf_path)) - text, _, _ = text_from_rendered(rendered) - logger.info(f"🖍️ Extracted text: {document_id}") - append_log(document_id, pdf=True, text=True) - - del rendered - del converter - - return { - "id": document_id, - "text": text, - "source": "Domsdatabasen", - "created": format_created(verdict_date), - "added": date.today().isoformat(), - "metadata": {}, - } - except Exception as e: - append_log(document_id, pdf=True, text=False, error=str(e)) - return None - finally: - gc.collect() - - -# ----------------- Page Fetching ------------------ - - -def fetch_case_page(page_num: int) -> tuple[list[dict], int]: - url = f"https://domsdatabasen.dk/webapi/api/Case/advanced?sorting=VerdictDateDesc&page={page_num}&pageSize=100" - response = retry(requests.post, url, headers=HEADERS, json={}) - data = response.json() - - document_entries = [] - for case in data.get("cases", []): - for doc in case.get("documents", []): - document_entries.append( - { - "id": doc["id"], - "verdictDateTime": doc.get("verdictDateTime"), - } - ) - - return document_entries, data.get("pageCount", 1) - - -# ----------------- Utilities ------------------ - - -def format_created(verdict_date: str | None) -> str: - if verdict_date: - try: - dt = datetime.fromisoformat(verdict_date) - formatted = dt.date().isoformat() - return f"{formatted}, {formatted}" - except Exception: - pass - today = date.today().isoformat() - return f"{today}, {today}" - - -# ----------------- Main Loop ------------------ - - -def main(): - PDF_DIR.mkdir(exist_ok=True) - init_csv() - - all_records = [] - page_num = 1 - _, total_pages = fetch_case_page(1) - logger.info(f"📄 Total pages: {total_pages}") - - existing_ids = load_existing_ids() - logger.info(f"🔄 Resuming with {len(existing_ids)} already processed IDs") - - while page_num <= total_pages: - logger.info(f"\n🔎 Fetching page {page_num}/{total_pages}") - - try: - doc_infos, _ = fetch_case_page(page_num) - except Exception as e: - logger.warning(f"❌ Failed to fetch page {page_num}: {e}") - page_num += 1 - continue - - doc_infos = [doc for doc in doc_infos if doc["id"] not in existing_ids] - - # Extract text in parallel using multiprocessing - with mp.Pool( - processes=MAX_WORKERS, initializer=worker_init, maxtasksperchild=10 - ) as pool: - results = pool.map(process_document, doc_infos) - - all_records.extend([r for r in results if r]) - - if all_records: - ds_new = Dataset.from_list(all_records) - - if PARQUET_FILE.exists(): - ds_old = Dataset.from_parquet(str(PARQUET_FILE)) - ds_old = 
cast(Dataset, ds_old) - ds_combined = concatenate_datasets([ds_old, ds_new]) - else: - ds_combined = ds_new - - ds_combined.to_parquet(str(PARQUET_FILE)) - logger.info(f"📦 Appended {len(all_records)} records to {PARQUET_FILE}") - existing_ids.update([r["id"] for r in all_records]) - all_records.clear() - - page_num += 1 - - ds = Dataset.from_parquet(str(PARQUET_FILE)) - ds = cast(Dataset, ds) - ds = remove_empty_texts(ds) - ds = remove_duplicate_text(ds) - ds = add_token_count(ds) - ds = ensure_column_order(ds) - - ds.to_parquet(str(PARQUET_FILE)) - - -if __name__ == "__main__": - # Ensure threads don't contend - os.environ["MKL_DYNAMIC"] = "FALSE" - os.environ["OMP_DYNAMIC"] = "FALSE" - os.environ["OMP_NUM_THREADS"] = "2" # Avoid OpenMP issues with multiprocessing - os.environ["OPENBLAS_NUM_THREADS"] = "2" - os.environ["MKL_NUM_THREADS"] = "2" - os.environ["GRPC_VERBOSITY"] = "ERROR" - os.environ["GLOG_minloglevel"] = "2" - os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = ( - "1" # Transformers uses .isin for a simple op, which is not supported on MPS - ) - os.environ["IN_STREAMLIT"] = "true" # Avoid multiprocessing inside surya - - mp.set_start_method("spawn", force=True) - log_path = Path(__file__).parent / "domsdatabasen.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/domsdatabasen/descriptive_stats.json b/data/domsdatabasen/descriptive_stats.json deleted file mode 100644 index 6df1393ef376617934cb94586bfb7212b12eb340..0000000000000000000000000000000000000000 --- a/data/domsdatabasen/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 8468, - "number_of_tokens": 86353024, - "min_length_tokens": 15, - "max_length_tokens": 1008826, - "number_of_characters": 256036077, - "min_length_characters": 35, - "max_length_characters": 3021437 -} \ No newline at end of file diff --git a/data/domsdatabasen/domsdatabasen.md b/data/domsdatabasen/domsdatabasen.md deleted file mode 100644 index 00dd866c5614cdfd9166ff8e72f5f01bac4e4df6..0000000000000000000000000000000000000000 --- a/data/domsdatabasen/domsdatabasen.md +++ /dev/null @@ -1,119 +0,0 @@ ---- -pretty_name: Domsdatabasen.dk -language: -- da -license: other -license_name: Danish Copyright Law -size_categories: -- 10k-100k -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Legal ---- - -# Dataset Card for Domsdatabasen.dk - - -[Domsdatabasen.dk](https://domsdatabasen.dk/) is a public database containing selected judgments from the Danish courts. - - -Launched in early 2022, the platform aims to increase transparency and public insight into the workings of the judiciary in Denmark. It is accessible to everyone – legal professionals, citizens, companies, and public authorities interested in Danish case law. - -## Dataset Description - -### Purpose and Scope -The main goal of the database is to support the principle of openness in the administration of justice. It offers users access to selected civil and criminal decisions, with an initial focus on rulings from the higher courts, such as: - -- The Supreme Court (Højesteret) -- The High Courts (Landsretterne) -- The Maritime and Commercial Court (Sø- og Handelsretten) - -Some rulings from the district courts (byretterne) are also included, particularly when they are part of a case string that has been appealed. 
-Over time, the database will expand in coverage and volume, especially as the court system transitions to new digital case management systems. - -### Pseudonymization and Data Protection -All published rulings are pseudonymized to protect the privacy of individuals involved, in accordance with the EU General Data Protection Regulation (GDPR), the Danish Data Protection Act, and rules from the Danish Data Protection Agency. - -Pseudonymization involves replacing personally identifiable information (e.g., names, CPR numbers) with general terms such as “the accused”, “witness 1”, etc. Additional data such as addresses or health-related details may be redacted or pseudonymized based on a case-specific evaluation. - -Some roles and names are not pseudonymized, including: - -- Judges from higher courts -- Legal representatives (lawyers) -- Author names in cited legal literature (unless directly involved in the case) -- Names in EU court decisions - -Businesses involved in cases are typically not pseudonymized unless their name reveals personal information or constitutes a trade secret. - -### Access and Development -Domsdatabasen is continuously being developed. As digitization progresses and technical workflows improve, the number of published decisions is expected to grow. The judgments are published as full case strings, including decisions at multiple judicial levels, providing context and legal reasoning throughout the appeal process. - - - -- **Number of samples**: 8.47K -- **Number of tokens (Llama 3)**: 86.35M -- **Average document length in tokens (min, max)**: 10.20K (15, 1.01M) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "11389", - "text": "## **Ikke grundlag for varetægtsfængsling af hensyn til retshåndhævelsen**\n\nDer var ikke særligt bes[...]", - "source": "Domsdatabasen", - "added": "2025-07-04", - "created": "2025-07-04, 2025-07-04", - "token_count": 796 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer. - - - -## License Information -
-Danish Copyright Law -

-Danish Copyright law at https://www.retsinformation.dk/forms/r0710.aspx?id=164796 states - - § 9. Love, administrative forskrifter, retsafgørelser og lignende offentlige aktstykker er ikke genstand for ophavsret. - -Stk. 2. Bestemmelsen i stk. 1 gælder ikke for værker, der fremtræder som selvstændige bidrag i de i stk. 1 nævnte aktstykker. Sådanne værker må dog gengives i forbindelse med aktstykket. Retten til videre udnyttelse afhænger af de i øvrigt gældende regler. - -

-
- - -### Dataset Statistics - - -

- -

- - - -## Additional Information - -**Extraction of text:** The documents downloaded from [domsdatabasen.dk](https://www.domsdatabasen.dk/) are PDFs. To extract the texts from those, the `create.py` script uses the [marker-pdf](https://github.com/datalab-to/marker/tree/master) package. diff --git a/data/domsdatabasen/domsdatabasen.parquet b/data/domsdatabasen/domsdatabasen.parquet deleted file mode 100644 index d0d47868a53e043cfd86153fbbe4690912e6f3c9..0000000000000000000000000000000000000000 --- a/data/domsdatabasen/domsdatabasen.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:132f593c951564e56c262520116bd02eea193f10443b9d12305e130dde16ee99 -size 123195077 diff --git a/data/domsdatabasen/images/dist_document_length.png b/data/domsdatabasen/images/dist_document_length.png deleted file mode 100644 index aaa6cc6f5cb52688a0ae91353bf7cbfb3af7f640..0000000000000000000000000000000000000000 --- a/data/domsdatabasen/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:47efb3cce555370325986d99b3b1f9c817e54b72eff8f9fde0d3c887bfa59af3 -size 558899 diff --git a/data/enevaeldens_nyheder/create.py b/data/enevaeldens_nyheder/create.py deleted file mode 100644 index 96487783a55b42a48f1be218dd6bc6a3b7ba71d9..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/create.py +++ /dev/null @@ -1,96 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets", -# "dynaword", -# ] -# -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword" } -# /// - -""" -Script for downloading and processing the dataset - -Note: To run this script, you need to set `GIT_LFS_SKIP_SMUDGE=1` to be able to install dynaword: - -```bash -GIT_LFS_SKIP_SMUDGE=1 uv run data/enevaeldens_nyheder/create.py -``` -""" - -import logging -from datetime import date -from pathlib import Path -from typing import Any, cast - -from datasets import Dataset, load_dataset - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) - -SOURCE = "enevaeldens_nyheder" - - -def reformat_samples(example: dict[str, Any]) -> dict[str, Any]: - creation_date = example["date"] - # Reformatting the date to YYYY-MM-DD format - start = creation_date - end = creation_date - return { - "id": f"{SOURCE}_{example['id']}", - "text": example["text"], - "source": SOURCE, - "added": date.today().strftime("%Y-%m-%d"), - "created": f"{start}, {end}", - } - - -def main(): - dataset = load_dataset( - "JohanHeinsen/ENO", - split="train", - revision="009f45ef63a1a41705781840807eb620f380d17d", - ) - dataset = cast(Dataset, dataset) - - logger.info("Removing 1 word texts") - len_ds = len(dataset) - dataset = dataset.filter( - lambda x: len(x["text"].split()) >= 2 - ) # require at least 2 words in the text - logger.info(f"Filtered {len_ds - len(dataset)} 1 word examples") - - logger.info("Filtering out texts with predicted word accuracy < 0.7") - len_ds = len(dataset) # reset so the next count only covers this filter - dataset = dataset.filter(lambda x: x["pwa"] >= 0.7) - logger.info(f"Filtered {len_ds - len(dataset)} low accuracy examples") - - dataset = dataset.map(reformat_samples) - - dataset = remove_empty_texts(dataset) # remove rows with empty text - dataset = remove_duplicate_text(dataset) # remove rows with duplicate text - dataset = add_token_count(dataset) - dataset = ensure_column_order(dataset) - - 
dataset.to_parquet( - Path(__file__).parent / f"{SOURCE}.parquet", - ) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / f"{SOURCE}.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/enevaeldens_nyheder/descriptive_stats.json b/data/enevaeldens_nyheder/descriptive_stats.json deleted file mode 100644 index 8bccbb8245d1198ffd54554340fe07d405b711ad..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 4593228, - "number_of_tokens": 1034308344, - "min_length_tokens": 3, - "max_length_tokens": 37294, - "number_of_characters": 2889445364, - "min_length_characters": 4, - "max_length_characters": 111182 -} \ No newline at end of file diff --git a/data/enevaeldens_nyheder/enevaeldens_nyheder.log b/data/enevaeldens_nyheder/enevaeldens_nyheder.log deleted file mode 100644 index f03e6827bebffade8d6d98364050de4b255374a2..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/enevaeldens_nyheder.log +++ /dev/null @@ -1,9 +0,0 @@ -2025-08-05 13:09:29,533 - INFO - Removing 1 word texts -2025-08-05 13:10:14,475 - INFO - Filtered 42635 1 word examples -2025-08-05 13:10:14,475 - INFO - Filtering out texts with predicted word acuracy < 0.7 -2025-08-05 13:11:24,300 - INFO - Filtered 76655 low accuracy examples -2025-08-05 13:15:33,389 - INFO - Removing empty texts -2025-08-05 13:15:50,876 - INFO - Filtered 0 empty examples -2025-08-05 13:15:50,876 - INFO - Removing duplicate texts -2025-08-05 13:19:48,194 - INFO - Filtered 161196 duplicate examples -2025-08-05 13:32:46,967 - INFO - Ensuring columns are in the correct order and are present diff --git a/data/enevaeldens_nyheder/enevaeldens_nyheder.md b/data/enevaeldens_nyheder/enevaeldens_nyheder.md deleted file mode 100644 index 6d3c88a10df39e0880670790c6b9bc2ae2cf2c53..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/enevaeldens_nyheder.md +++ /dev/null @@ -1,172 +0,0 @@ ---- -pretty_name: "Enev\xE6ldens Nyheder Online" -language: -- da -license: cc-by-sa-4.0 -license_name: CC-BY-SA 4.0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- News -source_datasets: -- JohanHeinsen/ENO ---- - -# Dataset Card for Enevældens Nyheder Online - -![](images/header_img.jpeg) - -High quality OCR'd texts from Danish and Norwegian newspapers during the period of constitutional absolutism in Denmark (1660–1849). - - - -During the eighteenth century, newspapers became a ubiquitous medium. They informed a relatively large reading public about everything from high politics to the mundanities of local markets. -The dataset was created by re-processing over 550.000 digital images scanned from microfilm and held in the Danish Royal Library's collection. They had initially been OCR-processed, but the results were generally unreadable. ENO reprocessed the images using tailored pylaia models in Transkribus. The OCR-quality is generally high, despite the difficult state of the original images. -The newspaper editions have been segmented into individual texts using a model designed by the project team. Such texts are the base entity of the dataset. They include mainly two genres: news items and advertisements. 
- -## Dataset Description - - - -- **Number of samples**: 4.59M -- **Number of tokens (Llama 3)**: 1.03B -- **Average document length in tokens (min, max)**: 225.18 (3, 37.29K) - - - -* **Curated by**: Johan Heinsen and Camilla Bøgeskov, Historisk Datalaboratorium, Aalborg University. With assistance from Sofus Landor Dam, Anders Birkemose, Kamilla Matthiassen and Louise Karoline Sort. -* **Funded by**: MASSHINE, Aalborg University. - - -The dataset contains a wide range of newspapers. The total distribution can be studied here. They cover most of Denmark as well as the three oldest newspapers of Norway, running until the separation of the Danish-Norwegian conglomerate in 1814. This dataset represents version 0.9 (updated 5th of August 2025). - - -### Dataset Sources - -The sources of the dataset can be studied in more detail at the [project website](https://hislab.quarto.pub/eno/). -Most of the original image material is available in [LOAR](https://loar.kb.dk/handle/1902/7803) – a data repository of the Danish Royal Library. The Norwegian material was downloaded via the API of Nettbiblioteket. The scans of Nyeste Skilderie af Kjøbenhavn were taken from the Internet Archive repository of [Niels Jensen](https://archive.org/details/@uforbederlig). The scans for Politivennen stem from [Københavns Biblioteker](https://bibliotek.kk.dk/din/bag-om-kobenhavn/politivennen). Some early newspapers come from recent scans made available to the project by the Danish Royal Library. These are not yet available online. - -## Uses - -This dataset represents an effort to enable analysis of Denmark-Norway in the seventeenth, eighteenth, and nineteenth centuries. The data can be used to study and model sentiments, political and cultural currents, and the minutiae of urban life. - -In addition, this dataset is part of Danish Dynaword, a collection of datasets intended for training language models, thereby integrating Danish cultural heritage into the next generation of digital technologies. - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "enevaeldens_nyheder_aalborg1767_1767-01-02_1000001", - "text": "Et Menneske er skabt ey for sig selv allene: Hvert Lem paa Legemet det heele tiene maae, En Stolpes [...]", - "source": "enevaeldens_nyheder", - "added": "2025-08-05", - "created": "1767-01-02, 1767-01-02", - "token_count": 2377 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer. - - - - -## Dataset Creation - -### Curation Rationale - -The newspapers in the dataset generally represent the longest-running newspaper series in the Danish and Norwegian libraries. We prioritised long-running newspapers to enable historical analysis of changes over time. As historians, this was our initial ambition: to allow us to get quality serial text data. -We also prioritised geographical diversity, representing different regions of Denmark-Norway. Of course, this varies over time, as newspapers were most common in Copenhagen until the late eighteenth century. 
-Since the newspapers of Denmark's Caribbean colonies were primarily in English, they are not included. The text recognition model designed for the project struggles with English text. -Besides long-running series, we also included a few smaller newspaper series, mainly with an eye towards diversity of subject matter. These include Politivennen, which was concerned with very local news from Copenhagen and carried a lot of reader contributions, offering a unique insight into urban sentiments at the time. A similar inclusion was made with Jyllandsposten (of 1838), which was defined by a somewhat radical rural horizon. - -As a rule of thumb, publications have been digitised in total, as they exist in their respective collections. -This means that they sometimes include appendices and sometimes do not, depending on whether these exist. Holes in the dataset mirror holes in the archival collections. -The one exception to this rule is the newspaper Københavns Adresseavis. This advertisement paper has survived continuously from its inception in 1759, but from 1804 onwards, it is only included here with samples every fifth year. -The reason for sampling is a combination of the massive extent of this advertisement paper and the poor condition of the digital images available for this specific period. Combined, this meant that the results of the text recognition process were not entirely satisfying relative to the resources necessary for the effort. Therefore, we decided to prioritize other publications that would yield better quality text. - -Most publications contain title page marginalia (date, title, etc.). Because these were set with large ornamental types, they are typically recognised with much less accuracy than the regular text. We are currently working on implementing a step in the workflow to identify and filter out these elements. - -### Data Collection and Processing - -The text recognition model used to create the dataset is available via [Transkribus](https://app.transkribus.org/models/public/text/danish-newspapers-1750-1850). A description of the text segmentation process can be found [here](https://hislab.quarto.pub/eno/dokumentation.html). Besides segmentation into separate news items / advertisements, no further processing of the text has taken place. We are currently experimenting with automated error correction using decoder-models. - -For Danish Dynaword we apply additional filtering (a short sketch of this filtering appears below), including: - -- 1) Removing 1-word documents (using a whitespace split) -- 2) Removing documents with a PWA < 0.7 - -PWA is defined as: - -> A predicted word accuracy [PWA] based on a dictionary consisting of words from literary works, personal names and place names from the census of 1787, and a manually curated list of common words that are present in the material, but not represented in canonical literature. This is an estimate. In general we advise that you filter the dataset on this variable in case of using the material for language modelling. This will also filter out texts in other languages than Danish. -> -> source: [JohanHeinsen/ENO](https://huggingface.co/datasets/JohanHeinsen/ENO#dataset-structure) - -Below you see 10 examples of documents (truncated to 200 characters) filtered out due to the PWA filtering: - -``` -['Under Staders Segl. nespil.', - 'Frisk Selter=, Permonter=, Bitter, og FachingerVand bekommes paa Løveapotheket.', - 'Søesyglinsk, Christoph. Auf Anordning der Liquidations=Commission, den ten August 1834. (Ges.) Mitglied der Commission, Regierungsrath: Pestof. 
Stellvertretender Secretair. Gabriel Ostrowski.', - 'J de Reussiske Koge: Bordelummer Seil.', - 'Scriptores historiae Byzantinae vird bei uns un entgeltlich ansgegeben. Anch sind bei und fortige Bogen dieses Werkes in den verschiedenen Ansgeden auf Druck., Schreibe und Velinpapier niedergelegt, z', - 'Gammel Conjac. Potten.', - 'NOTIFICATION. Von der 5ten Classe, der 7ten Königl. allen privilegitten Kopenhagner Lotteren, deren Ziehung den 17ten Passati geendiget worden, werden die Gewinne den 8ten hujus und følgende Werkeltag', - 'Jm Verlag des Unterzeichneten har die Presse verlassen: Uever dis religiøse Bestimmung der Jugend, in einigen Predigten von K. C. von Gehren. Jn dieser Samlung sind følgende Gegenstande behandelt: 1) ', - "ditoyens fortund, ) vous qui, loin des combats, Pouves jouir en pair dans vos heureur ClimatsDes trefors annuel d'unne moisson fertileDont il plait aux saisons de couronner votre ile, Vous, diseje, a ", - 'AVERTISSEMENTS. Ausser der am Seelandischen Langericht geschehene Proclamation, wird auch hiedurch zu dreien mahlen kund gethan, das die Theilungs Berichtigung nach dem menland Johann Georg Kanneworff'] - ``` - -### Dataset Statistics - - -

- -

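As referenced under Data Collection and Processing above, here is a minimal sketch of the two Dynaword filters. It mirrors the logic of the deleted `create.py` (repo id and `pwa` column as documented at JohanHeinsen/ENO); the thresholds are the ones stated above:

```py
# Sketch of the Dynaword filtering: drop 1-word texts, then drop texts
# whose predicted word accuracy (PWA) is below 0.7.
from datasets import load_dataset

eno = load_dataset("JohanHeinsen/ENO", split="train")
eno = eno.filter(lambda x: len(x["text"].split()) >= 2)  # keep >= 2 words
eno = eno.filter(lambda x: x["pwa"] >= 0.7)  # keep high-confidence OCR only
```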
- - -The coverage of the newspapers included can be seen here: - -![](images/coverage-of-newspapers.jpeg) - -The distribution of texts per year is as follows: - -![](images/distribution-pr-year.jpeg) - - -## Personal and Sensitive Information - -Due to the historical nature of the data, ENO contains no personal or sensitive information. - -## Bias, Risks, and Limitations - -The data reflects the time of its initial creation. This means that it mirrors and describes a deeply hierarchical society that was structured by deep-seated biases and forms of discrimination that are alien even to some of the worst among the living today. For example, the material contains racist language in describing contemporary phenomena such as the Transatlantic slave trade and the persecution of Jewish diasporas. Uses that might relay or perpetuate such sentiments should take these risks into account. It is a historical text corpus, warts and all. - -Please also note that, although the newspapers are all in Danish, they do contain intermittent passages in German and Latin. - -Some advertisements were reprinted verbatim. The dataset, therefore, includes occasional duplicate texts. - - -### License Information - -The dataset is licensed under CC BY-SA 4.0. Please note that this license only pertains to the digitised text and dataset curation, not the original images. The original images of all material stemming from The Danish Royal Library, Nettbiblioteket, Københavns Biblioteker as well as the scans of Nyeste Skilderie af Kiøbenhavn made available by Niels Jensen are all in the public domain. - -## More Information - -For questions related to the dataset, curation, and annotation, please contact Johan Heinsen, Aalborg University diff --git a/data/enevaeldens_nyheder/enevaeldens_nyheder.parquet b/data/enevaeldens_nyheder/enevaeldens_nyheder.parquet deleted file mode 100644 index 9b6b2111c557178b2246e59a13f94bb17c0c9abd..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/enevaeldens_nyheder.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8f0ccbf865189f37c9735e001219ef85da11ea3b5849621993a995f138c7f51d -size 1856788258 diff --git a/data/enevaeldens_nyheder/images/coverage-of-newspapers.jpeg b/data/enevaeldens_nyheder/images/coverage-of-newspapers.jpeg deleted file mode 100644 index 08824c01d39e2f926b478f5adbe43592f87588ca..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/images/coverage-of-newspapers.jpeg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:66d18149a4d2050eaef38ae8c8b6ee101bebcdffa124d1accde5414198a4b198 -size 1082268 diff --git a/data/enevaeldens_nyheder/images/dist_document_length.png b/data/enevaeldens_nyheder/images/dist_document_length.png deleted file mode 100644 index 2b8bc17a4a249ceea8bbb39331333ca8c153e8c8..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:af6e89096c36f019d387dcb9b8a249f6a1aad6008fc1f31f94cdb83572ef2cd0 -size 579348 diff --git a/data/enevaeldens_nyheder/images/distribution-pr-year.jpeg b/data/enevaeldens_nyheder/images/distribution-pr-year.jpeg deleted file mode 100644 index dc47fc89c4e49c2962107bf9fe21c4e64f74b687..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/images/distribution-pr-year.jpeg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:c27f03153532cf2b52db3a541fa1406495696dcfe13b56519d685d2d7ab6f101 -size 529650 diff --git a/data/enevaeldens_nyheder/images/header_img.jpeg b/data/enevaeldens_nyheder/images/header_img.jpeg deleted file mode 100644 index 5d889bf6be86936474841a0c7ee109507db85ee2..0000000000000000000000000000000000000000 --- a/data/enevaeldens_nyheder/images/header_img.jpeg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4d4c90b67829124c82be5bdbc74159ebdd5cd26802f6708def6a7751aa9f01aa -size 326751 diff --git a/data/ep/descriptive_stats.json b/data/ep/descriptive_stats.json deleted file mode 100644 index b23041d9755732f2fa41e5b4df4406a371480d45..0000000000000000000000000000000000000000 --- a/data/ep/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 3930, - "number_of_tokens": 100841366, - "min_length_tokens": 8, - "max_length_tokens": 222729, - "number_of_characters": 311885257, - "min_length_characters": 14, - "max_length_characters": 688878 -} \ No newline at end of file diff --git a/data/ep/ep.md b/data/ep/ep.md index ba801013ed9211547514f9ae041abc7cf99a2130..52bec996d5638e7eda86f6eca3431de15e4ab7c7 100644 --- a/data/ep/ep.md +++ b/data/ep/ep.md @@ -1,97 +1,55 @@ --- pretty_name: European Parliament language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Conversation -- Spoken + - language-modeling --- - # Dataset Card for European Parliament - - -The Danish subsection of [Europarl](https://aclanthology.org/2005.mtsummit-papers.11/). - - - -Europarl is a corpus of parallel text in 11 languages from the proceedings of the European Parliament, which are published on the web. This corpus has found widespread use in the NLP community. It was initially intended as training data for statistical machine translation. - - ## Dataset Description - - - -- **Number of samples**: 3.93K -- **Number of tokens (Llama 3)**: 100.84M -- **Average document length in tokens (min, max)**: 25.66K (8, 222.73K) - - - - -## Dataset Structure +- **Number of records:** 4213 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "ep_07-02-01-008", - "text": "TALER 6703: Jeg har stemt for henstillingen om godkendelse af opdelingsanordninger til beskyttelse a[...]", - "source": "ep", - "added": "2019-11-20", - "created": "2004-01-01, 2009-01-01", - "token_count": 16237 + 'text': 'TALER 6703: Jeg har stemt for henstillingen om god', + 'source': 'ep', + 'id': 'ep_07-02-01-008', + 'added': '2019-11-20', + 'created': '2004-01-01, 2009-01-01', + 'metadata': { + 'domain': 'Conversation', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'European Parliament' + } } ``` -### Data Fields - -An entry in the dataset consists of the following fields: +## Data Fields -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. 
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer. - +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp for when this data was acquired. +- **created**: timestamp for when the original document was created (best guess if not available). +- **metadata**: source-specific metadata. -### Dataset Statistics +## License Information +
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code - -

- +CC0 1.0 Universal

- - - - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` +
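A hypothetical usage sketch for pulling this subset out of the combined collection via the `source` field described above; it assumes the collection loads with a plain `load_dataset` call (repo id taken from this diff):

```py
# Sketch: select only the European Parliament ("ep") documents.
from datasets import load_dataset

ds = load_dataset("danish-foundation-models/danish-dynaword", split="train")
ep = ds.filter(lambda x: x["source"] == "ep")
```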
diff --git a/data/ep/ep.parquet b/data/ep/ep.parquet index 88bbb9965bf345db187fa92207d065eb99f4bc75..86d6276a48cc780f2ab45eac7452c75f6022a73b 100644 --- a/data/ep/ep.parquet +++ b/data/ep/ep.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:59b66509c2456a50d1831d75b69c263684a1a207a44191f7eea2353909816839 -size 170737571 +oid sha256:85c8eb6954522c757ee3e410f7f277a74ecedd8e7507ef00a698a654dc8bea20 +size 171150568 diff --git a/data/ep/images/dist_document_length.png b/data/ep/images/dist_document_length.png deleted file mode 100644 index e1c0479a7a0e597f15294195e7fc392383bf8e51..0000000000000000000000000000000000000000 --- a/data/ep/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:deec5bb16ae671112df5fdbde9a44e5cd56ea7abf1cda061aff9dd2f3c367a1f -size 554717 diff --git a/data/eur-lex-sum-da/create.py b/data/eur-lex-sum-da/create.py deleted file mode 100644 index 1f6cca2d889c60f4d2c0c6bc1c664280628fd658..0000000000000000000000000000000000000000 --- a/data/eur-lex-sum-da/create.py +++ /dev/null @@ -1,50 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0", -# ] -# /// - -from pathlib import Path -from typing import cast - -from datasets import Dataset, load_dataset - -source = "eur-lex-sum-da" - - -def convert_sample(example): - new_example = dict( - text_new=example["text"], - source=source, - domain="Legal", - license="cc-by-sa-4.0", - added="2025-03-24", - created="2024-01-01, 2025-01-01", # Scrape happen within the year - data likely written earlier - metadata={"source-pretty": "Eur-lex-sum-da"}, - ) - - return new_example - - -def main(): - data_path = Path( - "/work/dfm-data/pre-training/eur-lex-sum-da/documents/eur-lex-sum-da.jsonl.gz" - ) - ds = load_dataset("json", data_files=data_path.as_posix(), split="train") - - ds = cast(Dataset, ds) - - ds = ds.map(convert_sample, remove_columns=ds.column_names) - ds = ds.rename_columns({"text_new": "text"}) - ds = ds.add_column("id", [f"{source}_{i}" for i in range(len(ds))]) # type: ignore - ds = ds.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - save_path = Path(__file__).parent / f"{source}.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/eur-lex-sum-da/descriptive_stats.json b/data/eur-lex-sum-da/descriptive_stats.json deleted file mode 100644 index a9797a869da93eaac1da2b2d390c1274d8046ef5..0000000000000000000000000000000000000000 --- a/data/eur-lex-sum-da/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 1002, - "number_of_tokens": 31367665, - "min_length_tokens": 2144, - "max_length_tokens": 1723932, - "number_of_characters": 87802625, - "min_length_characters": 6439, - "max_length_characters": 2979072 -} \ No newline at end of file diff --git a/data/eur-lex-sum-da/eur-lex-sum-da.md b/data/eur-lex-sum-da/eur-lex-sum-da.md deleted file mode 100644 index 1ce556a93738e8a657eed295f2821f01bb27b8c9..0000000000000000000000000000000000000000 --- a/data/eur-lex-sum-da/eur-lex-sum-da.md +++ /dev/null @@ -1,81 +0,0 @@ ---- -pretty_name: EUR-Lex SUM -language: -- da -license: cc-by-sa-4.0 -license_name: CC-BY-SA 4.0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Legal ---- - -# Dataset Card for EUR-Lex SUM - - -The Danish subsection of EUR-lex SUM consisting of EU legislation paired with professionally written summaries. 
- - -EUR-Lex SUM is a dataset containing summaries of EU legislation from the EUR-Lex database. It consists of pairs of full legal texts and their corresponding professionally written summaries, covering European Union legal documents. -The dataset is designed for training and evaluating automatic text summarization systems, particularly for legal documents. It's valuable for natural language processing (NLP) research since it provides high-quality, human-written summaries of complex legal texts in a specialized domain. - - -## Dataset Description - - -- **Number of samples**: 1.00K -- **Number of tokens (Llama 3)**: 31.37M -- **Average document length in tokens (min, max)**: 31.31K (2.14K, 1.72M) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "eur-lex-sum-da_0", - "text": "21.6.2019\nDA\nDen Europæiske Unions Tidende\nL 166/26\nKOMMISSIONENS DELEGEREDE FORORDNING (EU) 2019/98[...]", - "source": "eur-lex-sum-da", - "added": "2025-03-24 00:00:00", - "created": "2024-01-01, 2025-01-01", - "token_count": 148017 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer. - - - -### Dataset Statistics - - -

- -

- - - - -## Additional Information - - - - -### Citation Information - -No citation is applicable for this work. diff --git a/data/eur-lex-sum-da/eur-lex-sum-da.parquet b/data/eur-lex-sum-da/eur-lex-sum-da.parquet deleted file mode 100644 index 805548174e26d14e3aaf9355bae96dfb201edb01..0000000000000000000000000000000000000000 --- a/data/eur-lex-sum-da/eur-lex-sum-da.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7a3253316826455f318967ea6877367a222154c5b6cf47234dcff67182d3798b -size 35854369 diff --git a/data/eur-lex-sum-da/images/dist_document_length.png b/data/eur-lex-sum-da/images/dist_document_length.png deleted file mode 100644 index e6edb766a139715058c5c82fd00a0234dd27c92d..0000000000000000000000000000000000000000 --- a/data/eur-lex-sum-da/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:06acd694598c74f6dc412221982c46fc6dac85a438dd467436e833863e72b749 -size 564705 diff --git a/data/fm-udgivelser/create.py b/data/fm-udgivelser/create.py deleted file mode 100644 index bed0c21cb827874d9b0aaaf1e1cdfb5f8d182e97..0000000000000000000000000000000000000000 --- a/data/fm-udgivelser/create.py +++ /dev/null @@ -1,50 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0", -# ] -# /// - -from pathlib import Path -from typing import cast - -from datasets import Dataset, load_dataset - -source = "fm-udgivelser" - - -def convert_sample(example): - new_example = dict( - text_new=example["text"], - source=source, - domain="Legal", - license="cc-by-sa-4.0", - added="2025-03-24", - created="2024-01-01, 2026-01-01", # Scrape happen within these years - data likely written earlier - metadata={"source-pretty": "Finansministeriets Udgivelser"}, - ) - - return new_example - - -def main(): - data_path = Path( - "/work/dfm-data/pre-training/fm-udgivelser/documents/finans-ministeriet.jsonl.gz" - ) - ds = load_dataset("json", data_files=data_path.as_posix(), split="train") - - ds = cast(Dataset, ds) - - ds = ds.map(convert_sample, remove_columns=ds.column_names) - ds = ds.rename_columns({"text_new": "text"}) - ds = ds.add_column("id", [f"{source}_{i}" for i in range(len(ds))]) # type: ignore - ds = ds.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - save_path = Path(__file__).parent / f"{source}.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/fm-udgivelser/descriptive_stats.json b/data/fm-udgivelser/descriptive_stats.json deleted file mode 100644 index 7103a058e6797d05fb6b36b4f6b75109d295c70f..0000000000000000000000000000000000000000 --- a/data/fm-udgivelser/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 443, - "number_of_tokens": 50335291, - "min_length_tokens": 209, - "max_length_tokens": 595325, - "number_of_characters": 217115155, - "min_length_characters": 576, - "max_length_characters": 3791854 -} \ No newline at end of file diff --git a/data/fm-udgivelser/fm-udgivelser.md b/data/fm-udgivelser/fm-udgivelser.md deleted file mode 100644 index c37e29e1b898705f100ca8c758f3fe3ba5d93231..0000000000000000000000000000000000000000 --- a/data/fm-udgivelser/fm-udgivelser.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -pretty_name: Finansministeriets Udgivelser -language: -- da -license: cc-by-sa-4.0 -license_name: CC-BY-SA 4.0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Legal ---- - -# Dataset 
Card for Finansministeriets Udgivelser - -The official publication series of the Danish Ministry of Finance containing economic analyses, budget proposals, and fiscal policy documents. - -Finansministeriets Udgivelser (translated as "Publications of the Ministry of Finance") is the publishing arm or publication series of the Danish Ministry of Finance. It includes official reports, economic analyses, budget proposals, fiscal policy documents, and various other publications related to Denmark's public finances, economic policy, and financial governance. - -These publications typically provide insights into Denmark's economic outlook, public spending plans, tax policies, and financial reforms. They serve as important reference materials for economists, policy makers, researchers, and citizens interested in understanding Denmark's financial policies and economic direction. - -The publications are authoritative sources of information on Danish fiscal policy and are often used by various stakeholders to track and analyze the country's economic performance and public finance management. - - -## Dataset Description - - -- **Number of samples**: 443 -- **Number of tokens (Llama 3)**: 50.34M -- **Average document length in tokens (min, max)**: 113.62K (209, 595.33K) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "fm-udgivelser_0", - "text": "\n\nFinanslov for\n\nfinansåret 2023 Tekst og anmærkninger\n\n§ 1. Dronningen\n\n\n\n\n\n§ 1.\n\nDronningen\n\nTekst[...]", - "source": "fm-udgivelser", - "added": "2025-03-24", - "created": "2024-01-01, 2026-01-01", - "token_count": 1354 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer. - - - -### Dataset Statistics - - -

- -

- - - - -## Additional Information - - - -### Citation Information - -No citation is applicable for this work. diff --git a/data/fm-udgivelser/fm-udgivelser.parquet b/data/fm-udgivelser/fm-udgivelser.parquet deleted file mode 100644 index cd75cd07200cac7e5c4f9d4dc0d732e1d4b6958d..0000000000000000000000000000000000000000 --- a/data/fm-udgivelser/fm-udgivelser.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8ed08f5d20a99af7ababd78c95ae8219064ac3466b0507353a8799e88a7a749d -size 59887215 diff --git a/data/fm-udgivelser/images/dist_document_length.png b/data/fm-udgivelser/images/dist_document_length.png deleted file mode 100644 index 59f08678f2e982344bf73e1b6b16a7fb44aaf317..0000000000000000000000000000000000000000 --- a/data/fm-udgivelser/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:259da6f82e99bf7818cf72e7473f5ef7169d3c1201d9756833634821c75737ce -size 556453 diff --git a/data/ft/descriptive_stats.json b/data/ft/descriptive_stats.json deleted file mode 100644 index 2ab10527a64cdf8d8cd924d77fc15fb0b9022e91..0000000000000000000000000000000000000000 --- a/data/ft/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 1315, - "number_of_tokens": 114087231, - "min_length_tokens": 49, - "max_length_tokens": 342318, - "number_of_characters": 350769927, - "min_length_characters": 110, - "max_length_characters": 1041051 -} \ No newline at end of file diff --git a/data/ft/ft.md b/data/ft/ft.md index 989efbba0d1845ea358ef21967c5b06a663c8595..4dcc1f73434500617446e88849f4e764e3e23b30 100644 --- a/data/ft/ft.md +++ b/data/ft/ft.md @@ -1,98 +1,55 @@ --- -pretty_name: Folketinget +pretty_name: Folketinget (Danish Parliament) language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Conversation -- Spoken + - language-modeling --- - -# Dataset Card for Folketinget - +# Dataset Card for Folketinget (Danish Parliament) ## Dataset Description - - -Records from all meetings of The Danish parliament (Folketinget) in the parliament hall. - - - -All records have a transcript produced by commercial Automatic Speech Recognition (ASR) followed by post-editing by linguists employed by Folketinget for intelligibility, i.e., editing out dysfluencies, restarts, repairs, and mistakes. The transcript is, therefore, not a representation of spoken Danish but rather information content. - -In the parliament hall, one speaker at a time addresses members of the parliament. Monologues may include rebuttals or other comments to statements in previous monologues. While speakers can read aloud from a prepared statement or speak extemporaneously, we expect no difference to be apparent in the data because of the post-editing. The Folketinget section covers parliament hall sessions between 2009 and 2019. It contains discussions on a wide range of topics, issues, and named entities relevant to Danish society. - - - -- **Number of samples**: 1.31K -- **Number of tokens (Llama 3)**: 114.09M -- **Average document length in tokens (min, max)**: 86.76K (49, 342.32K) - - - - -## Dataset Structure +- **Number of records:** 1315 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. 
- - - -```py +```yaml { - "id": "ft_20121M100", - "text": "TALER 50: Mødet er åbnet. I dag er der følgende anmeldelser: Ministeren for by, bolig og landdistrik[...]", - "source": "ft", - "added": "2021-03-28", - "created": "2009-01-01, 2019-01-01", - "token_count": 84355 + 'text': 'TALER 50: Mødet er åbnet. I dag er der følgende an', + 'source': 'ft', + 'id': 'ft_20121M100', + 'added': '2021-03-28', + 'created': '2009-01-01, 2019-01-01', + 'metadata': { + 'domain': 'Conversation', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'Folketinget (Danish Parliament)' + } } ``` -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer. - +## Data Fields +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp for when this data was acquired. +- **created**: timestamp for when the original document was created (best guess if not available). +- **metadata**: source-specific metadata. -### Dataset Statistics +## License Information +
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code - -

- +CC0 1.0 Universal

- - - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` +
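The `created` field shown above is a comma-separated date range ("2009-01-01, 2019-01-01" in the example). A minimal parsing sketch, assuming the "YYYY-MM-DD, YYYY-MM-DD" format holds for all rows:

```py
# Sketch: split a `created` range into a (start, end) pair of dates.
from datetime import date

def parse_created(created: str) -> tuple[date, date]:
    start, end = (date.fromisoformat(p.strip()) for p in created.split(","))
    return start, end

print(parse_created("2009-01-01, 2019-01-01"))
```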
diff --git a/data/ft/ft.parquet b/data/ft/ft.parquet index 1fb3f6e77e3f2fbdab8e7dc4fa24f4ec4892d05c..b181d9e13522e4f4262e11f6fbabf3290c106f7e 100644 --- a/data/ft/ft.parquet +++ b/data/ft/ft.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:10fccf9ed35f7c1a845c2dd9abeff2121c8e0424586e20658335c06ec805dc4e -size 181932436 +oid sha256:31775c6e84a1542897641712e39d4c6cde2aa69673d7875c6a39f3148c08e0fb +size 182049520 diff --git a/data/ft/images/dist_document_length.png b/data/ft/images/dist_document_length.png deleted file mode 100644 index 87fc9937d06f8cdb6ff7c53239127b082de654c2..0000000000000000000000000000000000000000 --- a/data/ft/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2f96e2a26274c944073e060e4c558f2e25cfe6bf15d05ad4cd65be1994826f70 -size 545520 diff --git a/data/grundtvig/create.py b/data/grundtvig/create.py deleted file mode 100644 index d1470f2d5289c08ddfc4690371277571b91da40d..0000000000000000000000000000000000000000 --- a/data/grundtvig/create.py +++ /dev/null @@ -1,85 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets", -# "dynaword", -# ] -# -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword" } -# /// - -""" -Script for downloading and processing the dataset - -Note: To run this script, you need to set `GIT_LFS_SKIP_SMUDGE=1` to be able to install dynaword: - -```bash -GIT_LFS_SKIP_SMUDGE=1 uv run data/grundtvig/create.py -``` -""" - -import logging -from datetime import date -from pathlib import Path -from typing import Any, cast - -from datasets import Dataset, load_dataset - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) - -SOURCE = "grundtvig" - - -def reformat_samples(example: dict[str, Any]) -> dict[str, Any]: - year_of_creation = example["id"].split("_")[0] - # Reformatting the date to YYYY-MM-DD format - start = f"{year_of_creation}-01-01" - end = f"{year_of_creation}-12-31" - return { - "id": f"grundtvig_{example['id']}", - "text": example["md"], - "source": SOURCE, - "added": date.today().strftime("%Y-%m-%d"), - "created": f"{start}, {end}", - } - - -def main(): - dataset = load_dataset( - "chcaa/grundtvigs-works", - split="train", - revision="945dd72c1e902632ed581d90c8ff1571ef211a63", - ) - dataset = cast(Dataset, dataset) - - dataset = dataset.map(reformat_samples) - - dataset = remove_empty_texts(dataset) # remove rows with empty text - dataset = remove_duplicate_text(dataset) # remove rows with duplicate text - dataset = add_token_count(dataset) - dataset = ensure_column_order(dataset) - - dataset.to_parquet( - Path(__file__).parent / f"{SOURCE}.parquet", - ) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / f"{SOURCE}.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/grundtvig/descriptive_stats.json b/data/grundtvig/descriptive_stats.json deleted file mode 100644 index 3aa920306150149fec3dbbbdd8e97445463a81de..0000000000000000000000000000000000000000 --- a/data/grundtvig/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 632, - "number_of_tokens": 10525393, - "min_length_tokens": 100, - "max_length_tokens": 453718, - 
"number_of_characters": 29669190, - "min_length_characters": 298, - "max_length_characters": 1285256 -} \ No newline at end of file diff --git a/data/grundtvig/grundtvig.log b/data/grundtvig/grundtvig.log deleted file mode 100644 index 29b50d8992fce02c2fe020f3e54d09fd0684155a..0000000000000000000000000000000000000000 --- a/data/grundtvig/grundtvig.log +++ /dev/null @@ -1,10 +0,0 @@ -2025-07-21 22:26:35,895 - INFO - Removing empty texts -2025-07-21 22:26:36,148 - INFO - Filtered 0 empty examples -2025-07-21 22:26:36,148 - INFO - Removing duplicate texts -2025-07-21 22:26:36,292 - INFO - Filtered 0 duplicate examples -2025-07-21 22:26:42,922 - INFO - Ensuring columns are in the correct order and are present -2025-07-21 22:27:36,078 - INFO - Removing empty texts -2025-07-21 22:27:36,086 - INFO - Filtered 0 empty examples -2025-07-21 22:27:36,086 - INFO - Removing duplicate texts -2025-07-21 22:27:36,088 - INFO - Filtered 0 duplicate examples -2025-07-21 22:27:36,848 - INFO - Ensuring columns are in the correct order and are present diff --git a/data/grundtvig/grundtvig.md b/data/grundtvig/grundtvig.md deleted file mode 100644 index db127db39c78c51242ecd12ff814246b59705de5..0000000000000000000000000000000000000000 --- a/data/grundtvig/grundtvig.md +++ /dev/null @@ -1,183 +0,0 @@ ---- -pretty_name: Grundtvig's Works -language: -- da -license: cc0-1.0 -license_name: CC-0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Books -source_datasets: -- chcaa/grundtvigs-works ---- - -# Dataset Card for Grundtvig's Works - - -The complete collection of [Grundtvig](https://en.wikipedia.org/wiki/N._F._S._Grundtvig) (1783-1872) one of Denmark’s most influential figures. - - - -Grundtvig's Works is a comprehensive digital humanities dataset containing the complete collected writings of -[Nicolai Frederik Severin Grundtvig](https://en.wikipedia.org/wiki/N._F._S._Grundtvig) (1783-1872) was one of Denmark’s most influential cultural and intellectual figures. -As a critical edition, it includes editorial commentary by philologists and is continually updated. -The project is scheduled for completion in 2030 and will comprise 1,000 individual works spanning 35,000 pages. The complete edition is freely available online. - -## Dataset Description - - - -- **Number of samples**: 632 -- **Number of tokens (Llama 3)**: 10.53M -- **Average document length in tokens (min, max)**: 16.65K (100, 453.72K) - - - -This dataset represents version 1.25 (updated May 2025) of the digital scholarly edition of Grundtvig’s Works, comprising 632 texts by N.F.S. Grundtvig. -All texts have been OCR-scanned, and each is processed through three separate textual collations. We compare three different first editions, identify variants between them, -and incorporate their differences into the digitized version. - -Following collation, we enrich the texts with several layers of annotation, marked up in XML according to TEI P5 guidelines. - -These include: - -- *Explanatory commentaries* – clarifying older words or shifts in meaning -- *Named entities* – identifying people, places, titles, and mythological figures -- *Emendations* – documenting any corrections (no silent changes are made) -- *Bible references* - allusions, quotations, and explicit references are identified and categorized according to type and source -- -We also provide introductory texts and textual essays to offer historical and interpretive context. Before publication, each text undergoes a triple review process. 
-
-### Dataset Sources
-
-- **Dataset Website:** [www.grundtvigsværker.dk](https://www.grundtvigsværker.dk)
-
-## Uses
-
-This dataset represents a major digital preservation effort of Denmark's literary and intellectual heritage, providing structured access to works that shaped Danish theology, education, democracy, and cultural identity. It's valuable for research in digital humanities, Scandinavian studies, religious history, and 19th-century European thought.
-
-In addition, this dataset is also a part of [Danish Dynaword](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword), a collection of datasets intended for training language models, thus integrating Danish cultural heritage into the next generation of digital technologies.
-
-## Dataset Structure
-An example from the dataset looks as follows.
-
-```py
-{
-    "id": "grundtvig_1824_392_txt",
-    "text": "---\ntitle: Søgubrot med Efterklang\nauthor: Nicolai Frederik Severin Grundtvig\ndate: 2019-12-03\npubli[...]",
-    "source": "grundtvig",
-    "added": "2025-07-21",
-    "created": "1824-01-01, 1824-12-31",
-    "token_count": 4106
-}
-```
-
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample computed using the Llama 3 8B tokenizer
-
-A minimal loading sketch is shown below the dataset statistics.
-
-### Dataset Statistics
-
-
-*(figure: distribution of document length in tokens)*
-
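To make the field layout above concrete, here is a minimal loading sketch. It assumes that each source in Danish Dynaword can be selected as a configuration named after its `source` field; the config name `grundtvig` is an assumption for illustration, not a documented guarantee of the repository.

```py
# Minimal sketch, assuming the per-source subset is exposed as a config named
# after the `source` field ("grundtvig" here); adjust if the repository differs.
from datasets import load_dataset

ds = load_dataset(
    "danish-foundation-models/danish-dynaword",
    name="grundtvig",  # assumed config name
    split="train",
)

sample = ds[0]
print(sample["id"], sample["created"], sample["token_count"])
```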
-
-## Dataset Creation
-
-### Curation Rationale
-
-The digital scholarly edition of Grundtvig's Works was created to provide open, reliable, and annotated access to the entire published oeuvre of N.F.S. Grundtvig (1783–1872), one of Denmark's most influential thinkers. The rationale behind this effort is twofold: public accessibility and scholarly accuracy.
-
-On the one hand, the edition enables the general public to read Grundtvig's works on their own terms, supported by textual commentary that helps decode complex 19th-century language and theological or philosophical concepts. On the other, the edition serves as a scholarly tool, offering a searchable, critically edited, and TEI-encoded corpus that facilitates in-depth research across disciplines.
-
-Grundtvig's writings have had a lasting influence on Danish culture, education, and national identity. They are frequently referenced in contemporary political and cultural debates. However, many of his texts have until now only existed in fragile first editions or scattered, outdated collections. By digitizing, editing, and annotating his complete published works – including posthumous publications central to his public image – the project ensures both preservation and access.
-
-The primary motivation behind creating this dataset was to bridge the gap between Grundtvig's historical significance and the limited access to his writings. By offering a freely accessible digital edition, the project not only preserves a vital part of Danish cultural heritage but also democratizes access to foundational texts in Danish intellectual history. This aligns with both public interest and scholarly needs: to make Grundtvig's complex legacy understandable, searchable, and usable in modern contexts.
-
-The edition was launched in 2010 by the Center for Grundtvig Studies and is scheduled for completion in 2030. It is funded by the Danish Finance Act, ensuring its continued development as a national cultural and scholarly resource.
-
-#### Data Collection and Processing
-
-All texts in the Grundtvig's Works dataset originate from printed first editions of N.F.S. Grundtvig's published writings. The digitization process begins with OCR (optical character recognition) scanning of the original editions. Following OCR, each text undergoes three separate textual collations: we compare three different first editions of the same work to identify textual variants. Differences are systematically incorporated into the digitized version to ensure accuracy and representational fidelity.
-
-After collation, the editorial team performs further corrections based on internal editorial review. The final result is a fully digitized, TEI P5 XML–encoded version of each text.
-On top of this, all works are accompanied by facsimiles – high-resolution images of the first printed editions – allowing users to view the original sources alongside the transcribed and annotated texts.
-
-Before upload, the dataset was provided as XML and .txt files. The XML files were converted to .md and uploaded to Hugging Face. The scripts for conversion and upload can be found [here](https://huggingface.co/datasets/chcaa/grundtvigs-works/tree/main/src), along with a [.lock](https://huggingface.co/datasets/chcaa/grundtvigs-works/tree/main) file specifying the exact version used.
-
-#### Who are the source data producers?
-
-N.F.S. Grundtvig is the author of the source material. The Center for Grundtvig Studies at Aarhus University curates, digitizes, and maintains the dataset.
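The conversion scripts linked above are the authoritative pipeline; purely as a rough illustration of the kind of step involved, here is a sketch that pulls plain text out of a TEI P5 XML file. The element layout of the actual files is an assumption here, not taken from the repository.

```py
# Rough illustration only: extract plain text from the <body> of a TEI P5 file.
# The real XML-to-markdown conversion lives in the linked chcaa repository.
from xml.etree import ElementTree as ET

TEI_NS = {"tei": "http://www.tei-c.org/ns/1.0"}  # standard TEI namespace


def tei_body_text(path: str) -> str:
    root = ET.parse(path).getroot()
    body = root.find(".//tei:body", TEI_NS)
    return " ".join(body.itertext()) if body is not None else ""
```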
-
-### Annotations
-
-We annotate explanatory commentaries, named entities (people, places, titles, and mythological figures), biblical references, and emendations (documenting corrections). Only emendations are part of this dataset.
-The annotation process began in 2010 and will continue until 2030.
-
-#### Who are the annotators?
-
-The editorial team consists of 12 philologists, 5 student assistants, and one editor-in-chief.
-
-As of 2025, the team includes 13 female and 5 male staff members.
-
-### Formatting
-
-The samples are currently formatted as markdown with a frontmatter which contains information about the author, year of digitization, etc. (see the parsing sketch below).
-
-#### Personal and Sensitive Information
-
-This dataset contains no personal or sensitive information.
-
-## Bias, Risks, and Limitations
-
-The Grundtvig's Works dataset contains texts written in 19th-century Danish and reflects the linguistic, cultural, and ideological norms of its time.
-As such, it includes perspectives, assumptions, and biases characteristic of the period.
-Readers should be aware that the author, N.F.S. Grundtvig, expressed strong personal and political opinions,
-including nationalistic views and critical stances toward specific groups – such as Germans – which may be considered offensive or exclusionary by contemporary standards.
-
-### License Information
-
-N.F.S. Grundtvig's works fall under Public Domain (CC0).
-
-### Citation Information
-
-Studies where the dataset from Grundtvig's Works has been used:
-
-- Baunvig, K. F., & Nielbo, K. L. (2022). Mermaids are Birds: Embedding N.F.S. Grundtvig's Bestiary. In K. Berglund, M. La Mela, & I. Zwart (Eds.), Proceedings of the 6th Digital Humanities in the Nordic and Baltic Countries Conference (DHNB 2022) (Vol. 3232, pp. 23-32). CEUR-WS.org. http://ceur-ws.org/Vol-3232/paper02.pdf
-- Baunvig, K. F., Jarvis, O., & Nielbo, K. L. (2021). Emotional Imprints: Exclamation Marks in N.F.S. Grundtvig's Writings. In S. Reinsone, I. Skadiņa, J. Daugavietis, & A. Baklāne (Eds.), Post-Proceedings of the 5th Conference Digital Humanities in the Nordic Countries (DHN 2020) (pp. 156-169). http://ceur-ws.org/Vol-2865/short7.pdf
-- Nielbo, K. L., Baunvig, K. F., Liu, B., & Gao, J. (2019). A Curious Case of Entropic Decay: Persistent Complexity in Textual Cultural Heritage. Digital Scholarship in the Humanities, Volume 34, Issue 3, September 2019, Pages 542–557. https://doi.org/10.1093/llc/fqy054
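As a small illustration of the frontmatter layout described under Formatting above, here is a sketch that splits a sample's frontmatter from its body. It assumes the `---`-delimited layout shown in the example record; this helper is not part of the repository.

```py
# Minimal sketch: split the markdown frontmatter from the body of a sample,
# assuming "---"-delimited frontmatter as in the example record above.
def split_frontmatter(text: str) -> tuple[str, str]:
    if text.startswith("---\n"):
        frontmatter, _, body = text[4:].partition("\n---\n")
        return frontmatter, body.lstrip("\n")
    return "", text
```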
-
-## More Information
-
-For questions related to the dataset, curation, and annotation, please contact the [Center for Grundtvig Studies](https://grundtvigcenteret.au.dk/).
-
-The edition is funded by the Danish Finance Act.
diff --git a/data/grundtvig/grundtvig.parquet b/data/grundtvig/grundtvig.parquet
deleted file mode 100644
index e49bc3714778127ebe7c739f65bf2cb69b915030..0000000000000000000000000000000000000000
--- a/data/grundtvig/grundtvig.parquet
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5242e74ddc101d6b57c77be29a1cac7f4b80dc0b3399d71a8703aeb7ae2acce6
-size 18711422
diff --git a/data/grundtvig/images/dist_document_length.png b/data/grundtvig/images/dist_document_length.png
deleted file mode 100644
index c28efce0f66b04ede88321005613be6c3ccb72b5..0000000000000000000000000000000000000000
--- a/data/grundtvig/images/dist_document_length.png
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:33b70fc2bf607a0371912bb2acbb77a54dcc19c38d62ff6cfbdcfe3d9f5a502f
-size 552594
diff --git a/data/gutenberg/descriptive_stats.json b/data/gutenberg/descriptive_stats.json
deleted file mode 100644
index 8e489e734ae8d66967a5f128f874c609a64d9520..0000000000000000000000000000000000000000
--- a/data/gutenberg/descriptive_stats.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-    "number_of_samples": 66,
-    "number_of_tokens": 6763317,
-    "min_length_tokens": 7915,
-    "max_length_tokens": 250511,
-    "number_of_characters": 19149764,
-    "min_length_characters": 24572,
-    "max_length_characters": 713986
-} \ No newline at end of file
diff --git a/data/gutenberg/gutenberg.md b/data/gutenberg/gutenberg.md
index b851813901fe39ca59b321f9c202e0dcfd8ae07b..e49bad392160c7b9eafbd156c3a094ea20bbaaaa 100644
--- a/data/gutenberg/gutenberg.md
+++ b/data/gutenberg/gutenberg.md
@@ -1,79 +1,375 @@
---
pretty_name: Gutenberg
language:
-- da
-license: other
-license_name: Gutenberg
+  - da
+license: Gutenberg License
+license_name: Gutenberg License
size_categories:
-- 1-10k
+  - 1-10k
task_categories:
-- text-generation
-- fill-mask
+  - text-generation
+  - fill-mask
task_ids:
-- language-modeling
-source_datasets:
-- danish-foundation-models/danish-gigaword
-domains:
-- Books
+  - language-modeling
---
-
# Dataset Card for Gutenberg
-
## Dataset Description
+- **Number of records:** 66
+- **Languages:** Danish
+## Dataset Structure
+An example from the dataset looks as follows.
+```yaml
+{
+ 'text': 'Afskriverens bemærkninger: Åbenlyse trykfejl er re',
+ 'source': 'gutenberg',
+ 'id': 'gutenberg_43899',
+ 'added': '2020-09-12',
+ 'created': '1700-01-01, 2022-01-01',
+ 'metadata': {
+ 'domain': 'Wiki & Books',
+ 'license': '*** START: FULL LICENSE ***
-
-The Danish subsection from Project [Gutenberg](https://www.gutenberg.org).
-
+THE FULL PROJECT GUTENBERG LICENSE
+PLEASE READ THIS BEFORE YOU DISTRIBUTE OR USE THIS WORK
+To protect the Project Gutenberg-tm mission of promoting the free
+distribution of electronic works, by using or distributing this work
+(or any other work associated in any way with the phrase "Project
+Gutenberg"), you agree to comply with all the terms of the Full Project
+Gutenberg-tm License available with this file or online at
+ www.gutenberg.org/license.
-Project Gutenberg is an online library of free eBooks. Project Gutenberg was the first provider of free electronic books, or eBooks.
+Section 1.
General Terms of Use and Redistributing Project Gutenberg-tm +electronic works - -- **Number of samples**: 66 -- **Number of tokens (Llama 3)**: 6.76M -- **Average document length in tokens (min, max)**: 102.47K (7.92K, 250.51K) - +1.A. By reading or using any part of this Project Gutenberg-tm +electronic work, you indicate that you have read, understand, agree to +and accept all the terms of this license and intellectual property +(trademark/copyright) agreement. If you do not agree to abide by all +the terms of this agreement, you must cease using and return or destroy +all copies of Project Gutenberg-tm electronic works in your possession. +If you paid a fee for obtaining a copy of or access to a Project +Gutenberg-tm electronic work and you do not agree to be bound by the +terms of this agreement, you may obtain a refund from the person or +entity to whom you paid the fee as set forth in paragraph 1.E.8. +1.B. "Project Gutenberg" is a registered trademark. It may only be +used on or associated in any way with an electronic work by people who +agree to be bound by the terms of this agreement. There are a few +things that you can do with most Project Gutenberg-tm electronic works +even without complying with the full terms of this agreement. See +paragraph 1.C below. There are a lot of things you can do with Project +Gutenberg-tm electronic works if you follow the terms of this agreement +and help preserve free future access to Project Gutenberg-tm electronic +works. See paragraph 1.E below. +1.C. The Project Gutenberg Literary Archive Foundation ("the Foundation" +or PGLAF), owns a compilation copyright in the collection of Project +Gutenberg-tm electronic works. Nearly all the individual works in the +collection are in the public domain in the United States. If an +individual work is in the public domain in the United States and you are +located in the United States, we do not claim a right to prevent you from +copying, distributing, performing, displaying or creating derivative +works based on the work as long as all references to Project Gutenberg +are removed. Of course, we hope that you will support the Project +Gutenberg-tm mission of promoting free access to electronic works by +freely sharing Project Gutenberg-tm works in compliance with the terms of +this agreement for keeping the Project Gutenberg-tm name associated with +the work. You can easily comply with the terms of this agreement by +keeping this work in the same format with its attached full Project +Gutenberg-tm License when you share it without charge with others. -## Dataset Structure -An example from the dataset looks as follows. +1.D. The copyright laws of the place where you are located also govern +what you can do with this work. Copyright laws in most countries are in +a constant state of change. If you are outside the United States, check +the laws of your country in addition to the terms of this agreement +before downloading, copying, displaying, performing, distributing or +creating derivative works based on this work or any other Project +Gutenberg-tm work. The Foundation makes no representations concerning +the copyright status of any work in any country outside the United +States. +1.E. 
Unless you have removed all references to Project Gutenberg: - -```py -{ - "id": "gutenberg_43899", - "text": "Afskriverens bemærkninger: Åbenlyse trykfejl er rettet\ni denne e-bog, men forfatterens stavning er f[...]", - "source": "gutenberg", - "added": "2020-09-12", - "created": "1700-01-01, 2022-01-01", - "token_count": 128423 -} -``` +1.E.1. The following sentence, with active links to, or other immediate +access to, the full Project Gutenberg-tm License must appear prominently +whenever any copy of a Project Gutenberg-tm work (any work on which the +phrase "Project Gutenberg" appears, or with which the phrase "Project +Gutenberg" is associated) is accessed, displayed, performed, viewed, +copied or distributed: + +This eBook is for the use of anyone anywhere at no cost and with +almost no restrictions whatsoever. You may copy it, give it away or +re-use it under the terms of the Project Gutenberg License included +with this eBook or online at www.gutenberg.org + +1.E.2. If an individual Project Gutenberg-tm electronic work is derived +from the public domain (does not contain a notice indicating that it is +posted with permission of the copyright holder), the work can be copied +and distributed to anyone in the United States without paying any fees +or charges. If you are redistributing or providing access to a work +with the phrase "Project Gutenberg" associated with or appearing on the +work, you must comply either with the requirements of paragraphs 1.E.1 +through 1.E.7 or obtain permission for the use of the work and the +Project Gutenberg-tm trademark as set forth in paragraphs 1.E.8 or +1.E.9. + +1.E.3. If an individual Project Gutenberg-tm electronic work is posted +with the permission of the copyright holder, your use and distribution +must comply with both paragraphs 1.E.1 through 1.E.7 and any additional +terms imposed by the copyright holder. Additional terms will be linked +to the Project Gutenberg-tm License for all works posted with the +permission of the copyright holder found at the beginning of this work. -### Data Fields +1.E.4. Do not unlink or detach or remove the full Project Gutenberg-tm +License terms from this work, or any files containing a part of this +work or any other work associated with Project Gutenberg-tm. -An entry in the dataset consists of the following fields: +1.E.5. Do not copy, display, perform, distribute or redistribute this +electronic work, or any part of this electronic work, without +prominently displaying the sentence set forth in paragraph 1.E.1 with +active links or immediate access to the full terms of the Project +Gutenberg-tm License. -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +1.E.6. You may convert to and distribute this work in any binary, +compressed, marked up, nonproprietary or proprietary form, including any +word processing or hypertext form. 
However, if you provide access to or +distribute copies of a Project Gutenberg-tm work in a format other than +"Plain Vanilla ASCII" or other format used in the official version +posted on the official Project Gutenberg-tm web site (www.gutenberg.org), +you must, at no additional cost, fee or expense to the user, provide a +copy, a means of exporting a copy, or a means of obtaining a copy upon +request, of the work in its original "Plain Vanilla ASCII" or other +form. Any alternate format must include the full Project Gutenberg-tm +License as specified in paragraph 1.E.1. +1.E.7. Do not charge a fee for access to, viewing, displaying, +performing, copying or distributing any Project Gutenberg-tm works +unless you comply with paragraph 1.E.8 or 1.E.9. +1.E.8. You may charge a reasonable fee for copies of or providing +access to or distributing Project Gutenberg-tm electronic works provided +that -## License Information +- You pay a royalty fee of 20% of the gross profits you derive from + the use of Project Gutenberg-tm works calculated using the method + you already use to calculate your applicable taxes. The fee is + owed to the owner of the Project Gutenberg-tm trademark, but he + has agreed to donate royalties under this paragraph to the + Project Gutenberg Literary Archive Foundation. Royalty payments + must be paid within 60 days following each date on which you + prepare (or are legally required to prepare) your periodic tax + returns. Royalty payments should be clearly marked as such and + sent to the Project Gutenberg Literary Archive Foundation at the + address specified in Section 4, "Information about donations to + the Project Gutenberg Literary Archive Foundation." +- You provide a full refund of any money paid by a user who notifies + you in writing (or by e-mail) within 30 days of receipt that s/he + does not agree to the terms of the full Project Gutenberg-tm + License. You must require such a user to return or + destroy all copies of the works possessed in a physical medium + and discontinue all use of and all access to other copies of + Project Gutenberg-tm works. + +- You provide, in accordance with paragraph 1.F.3, a full refund of any + money paid for a work or a replacement copy, if a defect in the + electronic work is discovered and reported to you within 90 days + of receipt of the work. + +- You comply with all other terms of this agreement for free + distribution of Project Gutenberg-tm works. + +1.E.9. If you wish to charge a fee or distribute a Project Gutenberg-tm +electronic work or group of works on different terms than are set +forth in this agreement, you must obtain permission in writing from +both the Project Gutenberg Literary Archive Foundation and Michael +Hart, the owner of the Project Gutenberg-tm trademark. Contact the +Foundation as set forth in Section 3 below. + +1.F. + +1.F.1. Project Gutenberg volunteers and employees expend considerable +effort to identify, do copyright research on, transcribe and proofread +public domain works in creating the Project Gutenberg-tm +collection. Despite these efforts, Project Gutenberg-tm electronic +works, and the medium on which they may be stored, may contain +"Defects," such as, but not limited to, incomplete, inaccurate or +corrupt data, transcription errors, a copyright or other intellectual +property infringement, a defective or damaged disk or other medium, a +computer virus, or computer codes that damage or cannot be read by +your equipment. + +1.F.2. 
LIMITED WARRANTY, DISCLAIMER OF DAMAGES - Except for the "Right +of Replacement or Refund" described in paragraph 1.F.3, the Project +Gutenberg Literary Archive Foundation, the owner of the Project +Gutenberg-tm trademark, and any other party distributing a Project +Gutenberg-tm electronic work under this agreement, disclaim all +liability to you for damages, costs and expenses, including legal +fees. YOU AGREE THAT YOU HAVE NO REMEDIES FOR NEGLIGENCE, STRICT +LIABILITY, BREACH OF WARRANTY OR BREACH OF CONTRACT EXCEPT THOSE +PROVIDED IN PARAGRAPH 1.F.3. YOU AGREE THAT THE FOUNDATION, THE +TRADEMARK OWNER, AND ANY DISTRIBUTOR UNDER THIS AGREEMENT WILL NOT BE +LIABLE TO YOU FOR ACTUAL, DIRECT, INDIRECT, CONSEQUENTIAL, PUNITIVE OR +INCIDENTAL DAMAGES EVEN IF YOU GIVE NOTICE OF THE POSSIBILITY OF SUCH +DAMAGE. + +1.F.3. LIMITED RIGHT OF REPLACEMENT OR REFUND - If you discover a +defect in this electronic work within 90 days of receiving it, you can +receive a refund of the money (if any) you paid for it by sending a +written explanation to the person you received the work from. If you +received the work on a physical medium, you must return the medium with +your written explanation. The person or entity that provided you with +the defective work may elect to provide a replacement copy in lieu of a +refund. If you received the work electronically, the person or entity +providing it to you may choose to give you a second opportunity to +receive the work electronically in lieu of a refund. If the second copy +is also defective, you may demand a refund in writing without further +opportunities to fix the problem. + +1.F.4. Except for the limited right of replacement or refund set forth +in paragraph 1.F.3, this work is provided to you 'AS-IS', WITH NO OTHER +WARRANTIES OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PURPOSE. + +1.F.5. Some states do not allow disclaimers of certain implied +warranties or the exclusion or limitation of certain types of damages. +If any disclaimer or limitation set forth in this agreement violates the +law of the state applicable to this agreement, the agreement shall be +interpreted to make the maximum disclaimer or limitation permitted by +the applicable state law. The invalidity or unenforceability of any +provision of this agreement shall not void the remaining provisions. + +1.F.6. INDEMNITY - You agree to indemnify and hold the Foundation, the +trademark owner, any agent or employee of the Foundation, anyone +providing copies of Project Gutenberg-tm electronic works in accordance +with this agreement, and any volunteers associated with the production, +promotion and distribution of Project Gutenberg-tm electronic works, +harmless from all liability, costs and expenses, including legal fees, +that arise directly or indirectly from any of the following which you do +or cause to occur: (a) distribution of this or any Project Gutenberg-tm +work, (b) alteration, modification, or additions or deletions to any +Project Gutenberg-tm work, and (c) any Defect you cause. + + +Section 2. Information about the Mission of Project Gutenberg-tm + +Project Gutenberg-tm is synonymous with the free distribution of +electronic works in formats readable by the widest variety of computers +including obsolete, old, middle-aged and new computers. It exists +because of the efforts of hundreds of volunteers and donations from +people in all walks of life. 
+ +Volunteers and financial support to provide volunteers with the +assistance they need are critical to reaching Project Gutenberg-tm's +goals and ensuring that the Project Gutenberg-tm collection will +remain freely available for generations to come. In 2001, the Project +Gutenberg Literary Archive Foundation was created to provide a secure +and permanent future for Project Gutenberg-tm and future generations. +To learn more about the Project Gutenberg Literary Archive Foundation +and how your efforts and donations can help, see Sections 3 and 4 +and the Foundation information page at www.gutenberg.org + + +Section 3. Information about the Project Gutenberg Literary Archive +Foundation + +The Project Gutenberg Literary Archive Foundation is a non profit +501(c)(3) educational corporation organized under the laws of the +state of Mississippi and granted tax exempt status by the Internal +Revenue Service. The Foundation's EIN or federal tax identification +number is 64-6221541. Contributions to the Project Gutenberg +Literary Archive Foundation are tax deductible to the full extent +permitted by U.S. federal laws and your state's laws. + +The Foundation's principal office is located at 4557 Melan Dr. S. +Fairbanks, AK, 99712., but its volunteers and employees are scattered +throughout numerous locations. Its business office is located at 809 +North 1500 West, Salt Lake City, UT 84116, (801) 596-1887. Email +contact links and up to date contact information can be found at the +Foundation's web site and official page at www.gutenberg.org/contact + +For additional contact information: + Dr. Gregory B. Newby + Chief Executive and Director + gbnewby@pglaf.org + +Section 4. Information about Donations to the Project Gutenberg +Literary Archive Foundation + +Project Gutenberg-tm depends upon and cannot survive without wide +spread public support and donations to carry out its mission of +increasing the number of public domain and licensed works that can be +freely distributed in machine readable form accessible by the widest +array of equipment including outdated equipment. Many small donations +($1 to $5,000) are particularly important to maintaining tax exempt +status with the IRS. + +The Foundation is committed to complying with the laws regulating +charities and charitable donations in all 50 states of the United +States. Compliance requirements are not uniform and it takes a +considerable effort, much paperwork and many fees to meet and keep up +with these requirements. We do not solicit donations in locations +where we have not received written confirmation of compliance. To +SEND DONATIONS or determine the status of compliance for any +particular state visit www.gutenberg.org/donate + +While we cannot and do not solicit contributions from states where we +have not met the solicitation requirements, we know of no prohibition +against accepting unsolicited donations from donors in such states who +approach us with offers to donate. + +International donations are gratefully accepted, but we cannot make +any statements concerning tax treatment of donations received from +outside the United States. U.S. laws alone swamp our small staff. + +Please check the Project Gutenberg Web pages for current donation +methods and addresses. Donations are accepted in a number of other +ways including checks, online payments and credit card donations. +To donate, please visit: www.gutenberg.org/donate + + +Section 5. General Information About Project Gutenberg-tm electronic +works. + +Professor Michael S. 
Hart was the originator of the Project Gutenberg-tm
+concept of a library of electronic works that could be freely shared
+with anyone. For forty years, he produced and distributed Project
+Gutenberg-tm eBooks with only a loose network of volunteer support.
+
+Project Gutenberg-tm eBooks are often created from several printed
+editions, all of which are confirmed as Public Domain in the U.S.
+unless a copyright notice is included. Thus, we do not necessarily
+keep eBooks in compliance with any particular paper edition.
+
+Most people start at our Web site which has the main PG search facility:
+
+ www.gutenberg.org
+
+This Web site includes information about Project Gutenberg-tm,
+including how to make donations to the Project Gutenberg Literary
+Archive Foundation, how to help produce our new eBooks, and how to
+subscribe to our email newsletter to hear about new eBooks.
+',
+ 'source-pretty': 'Gutenberg'
+ }
+}
+```
+
+## Data Fields
+
+- **id**: source-specific identifier.
+- **text**: textual content of the document.
+- **source**: source of the data.
+- **added**: timestamp when AI2 acquired this data.
+- **created**: timestamp when the original document was created (best guess if not available).
+- **metadata**: source-specific metadata.
+
+## License Information
Gutenberg License

- -``` *** START: FULL LICENSE *** THE FULL PROJECT GUTENBERG LICENSE @@ -398,57 +694,6 @@ This Web site includes information about Project Gutenberg-tm, including how to make donations to the Project Gutenberg Literary Archive Foundation, how to help produce our new eBooks, and how to subscribe to our email newsletter to hear about new eBooks. -```

-
-### Dataset Statistics
-
-*(figure: distribution of document length in tokens)*
-
-
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-    title = {{The Danish Gigaword Corpus}},
-    author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-    year = 2021,
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-    publisher = {NEALT}
-}
-```
diff --git a/data/gutenberg/gutenberg.parquet b/data/gutenberg/gutenberg.parquet
index 9d38a124e796705682ba0caaadecb8d8caaab43d..e2a16e9856fd2d6879045f22041a0eff1e13277b 100644
--- a/data/gutenberg/gutenberg.parquet
+++ b/data/gutenberg/gutenberg.parquet
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:7a3384a2012839c9caa2d6d971447ad4503937031f5c397bc2c5c0249445caad
-size 12332839
+oid sha256:973df5121d3da73a5915f6dd1da0290ffbaece92b2c7c4dec562155974c0076f
+size 12361984
diff --git a/data/gutenberg/images/dist_document_length.png b/data/gutenberg/images/dist_document_length.png
deleted file mode 100644
index d33188d87c182e79c6be4520c947fe8d0e7df20b..0000000000000000000000000000000000000000
--- a/data/gutenberg/images/dist_document_length.png
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6a920a22d290d5906eaf27b4fe907a7edc74830ad1001256155acc7c426c414b
-size 544163
diff --git a/data/health_hovedstaden/create.py b/data/health_hovedstaden/create.py
deleted file mode 100644
index 68332f1ea777552970fa2f162fc346d4a6362fd0..0000000000000000000000000000000000000000
--- a/data/health_hovedstaden/create.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# /// script
-# requires-python = "==3.12"
-# dependencies = [
-#     "datasets==3.2.0",
-#     "dynaword"
-# ]
-# [tool.uv.sources]
-# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "00e7f2aee7f7ad2da423419f77ecbb9c0536de0d" }
-# ///
-"""
-Script for downloading and processing Health Hovedstaden
texts. - -Note: To run this script, you need to set `GIT_LFS_SKIP_SMUDGE=1` to be able to install dynaword: - -```bash -GIT_LFS_SKIP_SMUDGE=1 uv run data/health_hovedstaden/create.py -``` -""" - -import logging -from datetime import datetime -from pathlib import Path -from typing import cast - -from datasets import Dataset, load_dataset - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) - -download_path = Path(__file__).parent / "tmp" - - -def main(): - save_path = Path(__file__).parent / "health_hovedstaden.parquet" - # Download data from repo: Den-Intelligente-Patientjournal/region_hovedstaden_text - ds = load_dataset( - "Den-Intelligente-Patientjournal/region_hovedstaden_text", split="train" - ) - dataset: Dataset = cast(Dataset, ds) - - # Extract the cleaned column - dataset = dataset.rename_column("cleaned", "text") - - # Add created column: 2015 and 2020 - dataset = dataset.add_column("created", ["2015-01-01, 2020-12-31"] * len(dataset)) # type: ignore - # Add added column: today - dataset = dataset.add_column( - "added", [datetime.today().date().strftime("%Y-%m-%d")] * len(dataset) - ) # type: ignore - # Add source column: health_hovedstaden - dataset = dataset.add_column("source", ["health_hovedstaden"] * len(dataset)) # type: ignore - # Add id column: health_hovedstaden_{idx} - dataset = dataset.add_column( - "id", [f"health_hovedstaden_{i}" for i in range(len(dataset))] - ) # type: ignore - - # quality checks and processing - dataset = remove_empty_texts(dataset) - dataset = remove_duplicate_text(dataset) - dataset = add_token_count(dataset) - dataset = ensure_column_order(dataset) - - # save to parquet - dataset.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/health_hovedstaden/descriptive_stats.json b/data/health_hovedstaden/descriptive_stats.json deleted file mode 100644 index 1ac884b4f96b5e7da46367e12d8395b340b1eb88..0000000000000000000000000000000000000000 --- a/data/health_hovedstaden/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 23996, - "number_of_tokens": 27066716, - "min_length_tokens": 4, - "max_length_tokens": 51033, - "number_of_characters": 79883922, - "min_length_characters": 4, - "max_length_characters": 152339 -} \ No newline at end of file diff --git a/data/health_hovedstaden/health_hovedstaden.md b/data/health_hovedstaden/health_hovedstaden.md deleted file mode 100644 index aafd94ad90dee14d984d2d41dc44040ee0055b57..0000000000000000000000000000000000000000 --- a/data/health_hovedstaden/health_hovedstaden.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -pretty_name: Health Hovedstaden -language: -- da -license: cc0-1.0 -license_name: CC-0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -source_datasets: -- Den-Intelligente-Patientjournal/region_hovedstaden_text -domains: -- Medical -- Encyclopedic ---- - -# Dataset Card for Health Hovedstaden - - -Guidelines and informational documents for healthcare professionals from the Capital Region - - -The document collection consists of guidelines and informational documents for healthcare professionals in the Capital Region of Denmark. The documents therefore contain a number of specialized terms and concepts that are frequently used within the healthcare sector. 
-
-The corpus was created based on the texts in the document collection and has been post-processed so that the texts can be used for the development of language technology.
-
-Martin Sundahl Laursen and Thiusius R. Savarimuthu from the University of Southern Denmark have assisted the Danish Agency for Digital Government with the post-processing of the data. Read their joint paper on "[Automatic Annotation of Training Data for Deep Learning Based De-identification of Narrative Clinical Text](https://ceur-ws.org/Vol-3416/paper_5.pdf)."
-
-## Dataset Description
-
-- **Number of samples**: 24.00K
-- **Number of tokens (Llama 3)**: 27.07M
-- **Average document length in tokens (min, max)**: 1.13K (4, 51.03K)
-
-## Dataset Structure
-An example from the dataset looks as follows.
-
-```py
-{
-    "id": "health_hovedstaden_0",
-    "text": "Acetylsalicylsyre - Aspirin, Akutlægebil\n\nMålgrupper og anvendelsesområde\nDefinitioner\nFremgangsmåde[...]",
-    "source": "health_hovedstaden",
-    "added": "2025-07-07",
-    "created": "2015-01-01, 2020-12-31",
-    "token_count": 766
-}
-```
-
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample computed using the Llama 3 8B tokenizer
-
-### Additional Processing
-
-### Unintended Uses
-
-Please note that the corpus has been developed for the purpose of language technology development and should not be used as a source of healthcare information. The documents were scraped at a specific time and will therefore not be updated with changes. In this regard, please refer to the Capital Region of Denmark's document collection.
-
-### Dataset Statistics
-
-
-*(figure: distribution of document length in tokens)*
-
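Given the wide token-count range reported above (documents as short as 4 tokens), here is a minimal filtering sketch using the precomputed `token_count` field. The config name `health_hovedstaden` and the 32-token threshold are illustrative assumptions, not project recommendations.

```py
# Minimal sketch: drop very short documents via the precomputed token_count.
# The config name and the 32-token threshold are illustrative assumptions.
from datasets import load_dataset

ds = load_dataset(
    "danish-foundation-models/danish-dynaword",
    name="health_hovedstaden",  # assumed config name
    split="train",
)
ds = ds.filter(lambda example: example["token_count"] >= 32)
```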
-
-## Additional Information
-
-### License Information
-
-The dataset has been released under a CC-0 license.
-
-### Citation Information
-
-If you are using the data, please reference the following paper: [Automatic Annotation of Training Data for Deep Learning Based De-identification of Narrative Clinical Text](https://ceur-ws.org/Vol-3416/paper_5.pdf)
diff --git a/data/health_hovedstaden/health_hovedstaden.parquet b/data/health_hovedstaden/health_hovedstaden.parquet
deleted file mode 100644
index 1539d220df508c909c898839777810f03f02d3dc..0000000000000000000000000000000000000000
--- a/data/health_hovedstaden/health_hovedstaden.parquet
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:dd95bbe8f9fc0b8ec530b0d0b9ba1ea51b4e7ec509a599a13dc3ec0aeac6c6d2
-size 41434842
diff --git a/data/health_hovedstaden/images/dist_document_length.png b/data/health_hovedstaden/images/dist_document_length.png
deleted file mode 100644
index b37faedfddb2f09b04fb6964cad5e8be9c102cd8..0000000000000000000000000000000000000000
--- a/data/health_hovedstaden/images/dist_document_length.png
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5252f386997e3d670bbea4ede6003170e2aa6793b59ca46217c713bf113d9334
-size 565700
diff --git a/data/hest/descriptive_stats.json b/data/hest/descriptive_stats.json
deleted file mode 100644
index b9af6ecfb4ba1bd7a6c35dffa0594e08e903b4f6..0000000000000000000000000000000000000000
--- a/data/hest/descriptive_stats.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-    "number_of_samples": 14342,
-    "number_of_tokens": 389318703,
-    "min_length_tokens": 3,
-    "max_length_tokens": 9812841,
-    "number_of_characters": 1193724991,
-    "min_length_characters": 2,
-    "max_length_characters": 30324298
-} \ No newline at end of file
diff --git a/data/hest/hest.md b/data/hest/hest.md
index 51835bcb596e419f8c47ef9aaa1eb93781db834f..b46957d8567d9a6a84a0816c1579ae7374f25934 100644
--- a/data/hest/hest.md
+++ b/data/hest/hest.md
@@ -1,97 +1,56 @@
---
pretty_name: Hestenettet (Danish debate forum)
language:
-- da
+  - da
license: cc0-1.0
-license_name: CC-0
+license_name: Creative Commons Zero v1.0 Universal
size_categories:
-- 10k-100k
+  - 10k-100k
task_categories:
-- text-generation
-- fill-mask
+  - text-generation
+  - fill-mask
task_ids:
-- language-modeling
-source_datasets:
-- danish-foundation-models/danish-gigaword
-domains:
-- Social Media
+  - language-modeling
---
-
-# Dataset Card for Hestenettet
-
-Samples from the Danish debate forum www.heste-nettet.dk.
-
-The forum has been in use since 1997 and is used as a debate forum covering a wide range of everyday topics.
-
-Its inclusion as training data for large language models has multiple times reached [national news](https://www.dr.dk/nyheder/viden/teknologi/heste-nettet-kan-blive-grundlag-kunstig-intelligens-paa-dansk).
-
+# Dataset Card for Hestenettet (Danish debate forum)
## Dataset Description
-
-- **Number of samples**: 14.34K
-- **Number of tokens (Llama 3)**: 389.32M
-- **Average document length in tokens (min, max)**: 27.15K (3, 9.81M)
-
-## Dataset Structure
+- **Number of records:** 14391
+- **Languages:** Danish
+## Dataset Structure
An example from the dataset looks as follows.
-
-```py
+```yaml
{
-    "id": "hest_forum112802271280227_0",
-    "text": "Er den ikke kær? 
\nJeg kan ikke forstå at der altid er nogle der åbenbart ser alle indlæg her på HN ,[...]",
-    "source": "hest",
-    "added": "2020-10-05",
-    "created": "2000-01-01, 2022-01-01",
-    "token_count": 311
+ 'text': 'Er den ikke kær?
+Jeg kan ikke forstå at der altid',
+ 'source': 'hest',
+ 'id': 'hest_forum112802271280227_0',
+ 'added': '2020-10-05',
+ 'created': '2000-01-01, 2022-01-01',
+ 'metadata': {
+ 'domain': 'Social Media',
+ 'license': 'Creative Commons Legal Code
+
+CC0 1.0 Universal',
+ 'source-pretty': 'Hestenettet (Danish debate forum)'
+ }
}
```
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample computed using the Llama 3 8B tokenizer
-
+## Data Fields
+- **id**: source-specific identifier.
+- **text**: textual content of the document.
+- **source**: source of the data.
+- **added**: timestamp when AI2 acquired this data.
+- **created**: timestamp when the original document was created (best guess if not available).
+- **metadata**: source-specific metadata.
-### Dataset Statistics
+## License Information
+
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code - -

- +CC0 1.0 Universal

-
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-    title = {{The Danish Gigaword Corpus}},
-    author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-    year = 2021,
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-    publisher = {NEALT}
-}
-```
+
diff --git a/data/hest/hest.parquet b/data/hest/hest.parquet index e52b5367d1d71650d2774f473c2a6499804c6c47..7bfdd4ec898b9598f7d4f3140877fbe4b0f4d824 100644 --- a/data/hest/hest.parquet +++ b/data/hest/hest.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:a971729650723836b4864b04b233332883101e08ff5135fa764cb895d9c4478f -size 747758292 +oid sha256:9b85d658074ebec3eb95da8f8e522d83707b646b5f3b8b706279496eec3b31c3 +size 748670544 diff --git a/data/hest/images/dist_document_length.png b/data/hest/images/dist_document_length.png deleted file mode 100644 index ff3201bb33fc232f33f9610302e769150d5e5c32..0000000000000000000000000000000000000000 --- a/data/hest/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d1f941ed35251646a0eb572230caee97959410018ceaeedfa1e9f65f6ab9a061 -size 549694 diff --git a/data/historical-danish-handwriting/create.py b/data/historical-danish-handwriting/create.py deleted file mode 100644 index 67d44354729cfecd002206ae4eeddd2d48ff4367..0000000000000000000000000000000000000000 --- a/data/historical-danish-handwriting/create.py +++ /dev/null @@ -1,127 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "beautifulsoup4", -# "datasets", -# "glob", -# "huggingface-hub", -# "pandas", -# ] -# /// - -""" -This script loads the historical danish handwriting data from Aarhus city archives (https://huggingface.co/datasets/aarhus-city-archives/historical-danish-handwriting). -It loads the columns doc_id, sequence and alto (and not the image files). -""" - -import os -from pathlib import Path -import glob -from datetime import datetime -import re -import pandas as pd -from bs4 import BeautifulSoup -from huggingface_hub import snapshot_download -from datasets import Dataset, Features, Value -from dynaword.process_dataset import add_token_count, ensure_column_order - - -def load_historical_danish_handwriting(path): - """Load historical-danish-handwriting data from aarhus-city-archives huggingface repo (path), return as pd df""" - raw_df = pd.DataFrame(columns=["doc_id", "sequence", "alto"]) - for file in sorted(glob.glob(os.path.join(path, "data", "*.parquet"))): - raw_df = pd.concat( - [raw_df, pd.read_parquet(file, columns=["doc_id", "sequence", "alto"])], - axis=0, - ignore_index=True, - ) - raw_df.sort_values(by=["doc_id", "sequence"], inplace=True) - return raw_df - - -def parse_xml(bs_obj): - """Extract text from xml strings (alto format)""" - text = "" - layout_str = bs_obj.Layout.find_all("String") - if len(layout_str) == 0: - return "" - else: - for j in layout_str: - if len(text) == 0: - text = text + j.attrs["CONTENT"] - else: - text = text + " " + j.attrs["CONTENT"] - return "\n" + text - - -def extract_text(raw_df): - """ - Extract text from historical-danish-handwriting data and return pd df - """ - text_df = pd.DataFrame(columns=["id", "text", "source", "added", "created"]) - i = 0 - for doc in pd.unique(raw_df["doc_id"]): - for seq in raw_df[raw_df["doc_id"] == doc]["sequence"]: - bs = BeautifulSoup( - raw_df.loc[(raw_df["doc_id"] == doc) & (raw_df["sequence"] == seq)][ - "alto" - ].item(), - "xml", - ) - if seq == 1: - filename = re.split("_", bs.Description.fileName.string)[1] - text_df.loc[i, "text"] = filename + parse_xml(bs) - else: - assert ( - filename in bs.Description.fileName.string - ), f"id in current seq {seq} does not match first in doc {doc}" - text_df.at[i, "text"] = text_df.loc[i, "text"] + parse_xml(bs) - text_df["source"] = 
"historical-danish-handwriting" - text_df.loc[i, "text"] = text_df.loc[i, "text"].strip("\n") - created = re.search(r"[0-9]{4}-[0-9]{4}", filename).group().split("-") - text_df.loc[i, "created"] = f"{created[0]}-01-01, {created[1]}-12-31" - text_df.loc[i, "id"] = ( - f"{text_df.loc[i, 'source']}_{str(doc)}_{re.sub(" ", "_", filename).strip().lower()}" - ) - i += 1 - return text_df - - -def convert_dynaword_format(text_df, outpath): - """Converts pandas df to dynaword format (dataset), adds source, added and token count columns. Saves as .parquet to defined path""" - text_df["added"] = datetime.now().date().isoformat() - ds = Dataset.from_pandas( - text_df, - preserve_index=False, - split="train", - features=Features( - { - "id": Value(dtype="string"), - "text": Value(dtype="string"), - "source": Value(dtype="string"), - "added": Value(dtype="string"), - "created": Value(dtype="string"), - "token_count": Value(dtype="int64"), - } - ), - ) - ds = add_token_count(ds) - ds = ensure_column_order(ds) - ds.to_parquet(outpath) - return print(f"extracted text saved to {outpath}") - - -def main(): - outpath = Path(__file__).parent / "historical-danish-handwriting.parquet" - path = snapshot_download( - repo_id="aarhus-city-archives/historical-danish-handwriting", - repo_type="dataset", - allow_patterns="data/", - ) - raw_df = load_historical_danish_handwriting(path) - text_df = extract_text(raw_df) - convert_dynaword_format(text_df, outpath) - - -if __name__ == "__main__": - main() diff --git a/data/historical-danish-handwriting/descriptive_stats.json b/data/historical-danish-handwriting/descriptive_stats.json deleted file mode 100644 index 948b25621b6e133c84f6591e36d4306311272a81..0000000000000000000000000000000000000000 --- a/data/historical-danish-handwriting/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 41, - "number_of_tokens": 5201057, - "min_length_tokens": 15248, - "max_length_tokens": 397342, - "number_of_characters": 14385594, - "min_length_characters": 42410, - "max_length_characters": 1128052 -} \ No newline at end of file diff --git a/data/historical-danish-handwriting/historical-danish-handwriting.md b/data/historical-danish-handwriting/historical-danish-handwriting.md deleted file mode 100644 index d100b21dfd0c0c927d1e34a303c040165e019762..0000000000000000000000000000000000000000 --- a/data/historical-danish-handwriting/historical-danish-handwriting.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -pretty_name: Historical Danish handwriting 1841-1939 -language: -- da -license: cc-by-4.0 -license_name: CC-BY 4.0 -size_categories: -- 10K -Minutes from City and Parish Council meetings between 1841 and 1939 from [The Historical Danish handwriting dataset](https://huggingface.co/datasets/aarhus-city-archives/historical-danish-handwriting) - - -More information about the original dataset, which includes the scanned pages can be found [here](https://huggingface.co/datasets/aarhus-city-archives/historical-danish-handwriting). The original dataset is curated by Aarhus City Archives, and contains more than 11.000 scanned documents and each's transcription. Transcription was primarily done by volunteers and employees at the participating Danish city archives. -For the current dataset, text from pages from the same collections (document ids) have been concatenated resulting in a dataset of 41 rows. 
-
-## Dataset Description
-
-- **Number of samples**: 41
-- **Number of tokens (Llama 3)**: 5.20M
-- **Average document length in tokens (min, max)**: 126.86K (15.25K, 397.34K)
-
-## Dataset Structure
-An example from the dataset looks as follows.
-
-```py
-{
-    "id": "historical-danish-handwriting_23739_elsted_forhandlingsprotokol_1907-1930",
-    "text": "Elsted Forhandlingsprotokol 1907-1930\n1916 d. 22 April holdtes Møde i Lystrup Afholdshotel Alle Medl[...]",
-    "source": "historical-danish-handwriting",
-    "added": "2025-09-15",
-    "created": "1907-01-01, 1930-12-31",
-    "token_count": 70588
-}
-```
-
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample computed using the Llama 3 8B tokenizer
-
-### Dataset Statistics
-
-
-*(figure: distribution of document length in tokens)*
-
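The `created` field above stores the covered period as a single `"YYYY-MM-DD, YYYY-MM-DD"` string; here is a minimal sketch for turning it into a pair of dates, with the format taken from the example record (this helper is not part of the repository):

```py
# Minimal sketch: parse the `created` range string into (start, end) dates,
# following the "YYYY-MM-DD, YYYY-MM-DD" format used throughout the collection.
from datetime import date


def parse_created(created: str) -> tuple[date, date]:
    start, end = (date.fromisoformat(part.strip()) for part in created.split(","))
    return start, end


print(parse_created("1907-01-01, 1930-12-31"))
# (datetime.date(1907, 1, 1), datetime.date(1930, 12, 31))
```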
-
-## Personal and Sensitive Information
-
-As the most recent data in the dataset is from 1939 and the dataset almost exclusively consists of deliberations on public matters, we have made no efforts to anonymize the data.
-
-The dataset might contain data that can be considered sensitive (e.g., data that reveals racial or ethnic origins, sexual orientations, religious beliefs, political opinions or union memberships, or locations; financial or health data; biometric or genetic data; forms of government identification, such as social security numbers; criminal history).
-
-If, contrary to expectations, you find any sensitive personal data, please contact us and we will, if necessary, anonymize the specific data point.
-
-## Additional Information
-
-### License Information
-
-The dataset is licensed under the [Creative Commons Attribution 4.0 International](https://creativecommons.org/licenses/by/4.0/) license (the same as the original [dataset](https://huggingface.co/datasets/aarhus-city-archives/historical-danish-handwriting)).
diff --git a/data/historical-danish-handwriting/historical-danish-handwriting.parquet b/data/historical-danish-handwriting/historical-danish-handwriting.parquet
deleted file mode 100644
index 63ba4e86288a906fabb60e785b3af6fd9e1045fc..0000000000000000000000000000000000000000
--- a/data/historical-danish-handwriting/historical-danish-handwriting.parquet
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:35b0ecccfe7887b227651f2eb6aff31b68ec53041863a9bee066ead4bf5b0de9
-size 7354723
diff --git a/data/historical-danish-handwriting/images/dist_document_length.png b/data/historical-danish-handwriting/images/dist_document_length.png
deleted file mode 100644
index e7ae217a937caa1afeffc17315e31698f3cd971f..0000000000000000000000000000000000000000
--- a/data/historical-danish-handwriting/images/dist_document_length.png
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c8eca662fb8adbc66bbfaba1a7149ed11a02d3ff12b54cded60198aeb8003887
-size 177857
diff --git a/data/jvj/descriptive_stats.json b/data/jvj/descriptive_stats.json
deleted file mode 100644
index 03e33705630d0e62cffd6a8ee9cba2c59d550f3a..0000000000000000000000000000000000000000
--- a/data/jvj/descriptive_stats.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-    "number_of_samples": 42,
-    "number_of_tokens": 3549181,
-    "min_length_tokens": 15474,
-    "max_length_tokens": 271790,
-    "number_of_characters": 10705534,
-    "min_length_characters": 47146,
-    "max_length_characters": 819703
-} \ No newline at end of file
diff --git a/data/jvj/images/dist_document_length.png b/data/jvj/images/dist_document_length.png
deleted file mode 100644
index d9e07e875cab988297d4bc8a5731bebfd5963d69..0000000000000000000000000000000000000000
--- a/data/jvj/images/dist_document_length.png
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0e4f83223b0864183be99cd146ede4e1b25e4809be7c7f770b4730d0d4ee947b
-size 528296
diff --git a/data/jvj/jvj.md b/data/jvj/jvj.md
index f1ee21bf150990a5d1cdef53e879abdcc3b5d93d..b139b8ce7057ca5806b92b36550bdfab351934b5 100644
--- a/data/jvj/jvj.md
+++ b/data/jvj/jvj.md
@@ -1,97 +1,51 @@
---
-pretty_name: Johannes V. Jensen
+pretty_name: Johannes V. 
 language:
-- da
+ - da
 license: cc-by-sa-4.0
-license_name: CC-BY-SA 4.0
+license_name: Creative Commons Attribution Share Alike 4.0
 size_categories:
-- 1-10k
+ - 1-10k
 task_categories:
-- text-generation
-- fill-mask
+ - text-generation
+ - fill-mask
 task_ids:
-- language-modeling
-source_datasets:
-- danish-foundation-models/danish-gigaword
-domains:
-- Books
+ - language-modeling
 ---
-
-# Dataset Card for Johannes V. Jensen
-
-The works of the Danish author and poet, [Johannes V. Jensen](https://da.wikipedia.org/wiki/Johannes_V._Jensen).
-
+# Dataset Card for Johannes V. Jensen (Danish poet)

 ## Dataset Description
-
-- **Number of samples**: 42
-- **Number of tokens (Llama 3)**: 3.55M
-- **Average document length in tokens (min, max)**: 84.50K (15.47K, 271.79K)
-
-## Dataset Structure
+- **Number of records:** 42
+- **Languages:** Danish
+## Dataset Structure
 An example from the dataset looks as follows.
-
-```py
+```yaml
 {
-    "id": "jvj_Jørgine",
-    "text": "JØRGINE JØRGINE KØBENHAVN HAGE & CLAUSENS FORLAG (J. FR. CLAUSEN) 1926 JOHANNES V. JENSEN COPYRIGHT [...]",
-    "source": "jvj",
-    "added": "2020-06-26",
-    "created": "1873-01-01, 1951-01-01",
-    "token_count": 29393
+    'text': 'JØRGINE JØRGINE KØBENHAVN HAGE & CLAUSENS FORLAG (',
+    'source': 'jvj',
+    'id': 'jvj_Jørgine',
+    'added': '2020-06-26',
+    'created': '1873-01-01, 1951-01-01',
+    'metadata': {
+        'domain': 'Wiki & Books',
+        'license': 'Attribution-ShareAlike 4.0 International',
+        'source-pretty': 'Johannes V. Jensen (Danish poet)'
+    }
 }
 ```
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer.
+## Data Fields
-### Dataset Statistics
+- **id**: source-specific identifier.
+- **text**: textual content of the document.
+- **source**: source of the data.
+- **added**: timestamp for when ai2 acquired this data.
+- **created**: timestamp for when the original document was created (best-guess if not available).
+- **metadata**: source-specific metadata.
-
-
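Note that both the old and the new card store `created` as a single "start, end" date-range string (e.g. '1873-01-01, 1951-01-01'). A small, hypothetical helper for consuming that format (the function name is ours, not part of the repo):

```py
from datetime import date

def parse_created(created: str) -> tuple[date, date]:
    """Split the documented 'YYYY-MM-DD, YYYY-MM-DD' range into two dates."""
    start, end = (date.fromisoformat(part.strip()) for part in created.split(","))
    return start, end

print(parse_created("1873-01-01, 1951-01-01"))
# (datetime.date(1873, 1, 1), datetime.date(1951, 1, 1))
```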

- +## License Information +

+Creative Commons Attribution Share Alike 4.0 +

+Attribution-ShareAlike 4.0 International

-
-
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish Gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-    title = {{The Danish Gigaword Corpus}},
-    author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-    year = 2021,
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-    publisher = {NEALT}
-}
-```
+
diff --git a/data/jvj/jvj.parquet b/data/jvj/jvj.parquet index e2f1a70423399118d647fa33024f36fcd5ddb61e..edc850f3d97ea130c3b62dadc1704bc1d6e8fb11 100644 --- a/data/jvj/jvj.parquet +++ b/data/jvj/jvj.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2beede35d3142941126fc6f2be32ad1400f8bbef1a9e29b474e4fff2cb5d6af7 -size 6823232 +oid sha256:7a524aafe8fe1ba86bc09c091b10aacf55e558124fef59e68f60bed03816636a +size 6829395 diff --git a/data/lexdk/create.py b/data/lexdk/create.py deleted file mode 100644 index 0121cb9358bc7c9876877c1a7377d1511909d98f..0000000000000000000000000000000000000000 --- a/data/lexdk/create.py +++ /dev/null @@ -1,85 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets", -# "pandas", -# ] -# /// -"""download lexdk from alexandrainst/lexdk-open""" - -from datetime import datetime -from pathlib import Path -from typing import cast - -import pandas as pd -from datasets import Dataset, load_dataset - -column_order = [ - "text", - "source", - "id", - "added", - "created", - "license", - "domain", - "metadata", -] - - -def convert_sample(example: dict) -> dict: - # from sample: - # { - # "url": "https://denstoredanske.lex.dk/Kullmanns_M%C3%B8lle", - # "title": "Kullmanns Mølle", - # "clarification": "", - # "authors": ["https://brugere.lex.dk/6929"], - # "date": "2021-01-20T13:23:20+01:00", - # "license": "fri anvendelse", - # "text": "Kullmanns Mølle er en mølle i Gudhjem, opkaldt efter Matts Kullmann, der byggede møllen i 1893 til sin søn, Christian Kullmann, se Gudhjem Mølle.", - # } - date = datetime.fromisoformat(example["date"]) - text = f"{example['title']}\n\npubliceret: {date}\n{example['text']}" - - new_example = dict( - text_new=text, - id=example["url"], - source="lexdk", - domain="Conversation", - license="cc-by-sa-4.0", - added="2025-01-04", - created=f"{date.date()}, {date.date()}", - metadata={"source-pretty": "Lex.dk"}, - ) - - return new_example - - -def main(): - ds = load_dataset("alexandrainst/lexdk-open", split="train") - ds = cast(Dataset, ds) - - dates = [datetime.fromisoformat(date).date() for date in ds["date"]] - print(str(min(dates)), ",", str(max(dates))) # 2009-01-28, 2023-09-05 - - assert len(set(ds["url"])) == len(ds) - - ds = ds.map(convert_sample, num_proc=4) - ds = ds.select_columns(column_order[1:] + ["text_new"]) - ds = ds.rename_columns({"text_new": "text"}) - # ensure order - ds = ds.select_columns(column_order) - - df = ds.to_pandas() - df = cast(pd.DataFrame, df) - dedup_df = df.drop_duplicates(keep="first", subset=["text"]) - print("N. 
duplicates: ", df.shape[0] - dedup_df.shape[0]) # 0 - - ds = ds.select(dedup_df.index) - assert len(set(ds["text"])) == len(ds) - - save_path = Path(__file__).parent / "lexdk.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/lexdk/create.py.lock b/data/lexdk/create.py.lock deleted file mode 100644 index 87f326a88bc6d243439ddc1dc06bc64cf26660c1..0000000000000000000000000000000000000000 --- a/data/lexdk/create.py.lock +++ /dev/null @@ -1,694 +0,0 @@ -version = 1 -revision = 1 -requires-python = ">=3.12" - -[manifest] -requirements = [ - { name = "datasets" }, - { name = "pandas" }, -] - -[[package]] -name = "aiohappyeyeballs" -version = "2.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, -] - -[[package]] -name = "aiohttp" -version = "3.11.14" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohappyeyeballs" }, - { name = "aiosignal" }, - { name = "attrs" }, - { name = "frozenlist" }, - { name = "multidict" }, - { name = "propcache" }, - { name = "yarl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6c/96/91e93ae5fd04d428c101cdbabce6c820d284d61d2614d00518f4fa52ea24/aiohttp-3.11.14.tar.gz", hash = "sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c", size = 7676994 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/ca/e4acb3b41f9e176f50960f7162d656e79bed151b1f911173b2c4a6c0a9d2/aiohttp-3.11.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc", size = 705489 }, - { url = "https://files.pythonhosted.org/packages/84/d5/dcf870e0b11f0c1e3065b7f17673485afa1ddb3d630ccd8f328bccfb459f/aiohttp-3.11.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a", size = 464807 }, - { url = "https://files.pythonhosted.org/packages/7c/f0/dc417d819ae26be6abcd72c28af99d285887fddbf76d4bbe46346f201870/aiohttp-3.11.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948", size = 456819 }, - { url = "https://files.pythonhosted.org/packages/28/db/f7deb0862ebb821aa3829db20081a122ba67ffd149303f2d5202e30f20cd/aiohttp-3.11.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534", size = 1683536 }, - { url = "https://files.pythonhosted.org/packages/5e/0d/8bf0619e21c6714902c44ab53e275deb543d4d2e68ab2b7b8fe5ba267506/aiohttp-3.11.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f", size = 1738111 }, - { url = "https://files.pythonhosted.org/packages/f5/10/204b3700bb57b30b9e759d453fcfb3ad79a3eb18ece4e298aaf7917757dd/aiohttp-3.11.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307", size = 1794508 }, - { url = 
"https://files.pythonhosted.org/packages/cc/39/3f65072614c62a315a951fda737e4d9e6e2703f1da0cd2f2d8f629e6092e/aiohttp-3.11.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3", size = 1692006 }, - { url = "https://files.pythonhosted.org/packages/73/77/cc06ecea173f9bee2f20c8e32e2cf4c8e03909a707183cdf95434db4993e/aiohttp-3.11.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0", size = 1620369 }, - { url = "https://files.pythonhosted.org/packages/87/75/5bd424bcd90c7eb2f50fd752d013db4cefb447deeecfc5bc4e8e0b1c74dd/aiohttp-3.11.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6", size = 1642508 }, - { url = "https://files.pythonhosted.org/packages/81/f0/ce936ec575e0569f91e5c8374086a6f7760926f16c3b95428fb55d6bfe91/aiohttp-3.11.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f", size = 1685771 }, - { url = "https://files.pythonhosted.org/packages/68/b7/5216590b99b5b1f18989221c25ac9d9a14a7b0c3c4ae1ff728e906c36430/aiohttp-3.11.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09", size = 1648318 }, - { url = "https://files.pythonhosted.org/packages/a5/c2/c27061c4ab93fa25f925c7ebddc10c20d992dbbc329e89d493811299dc93/aiohttp-3.11.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce", size = 1704545 }, - { url = "https://files.pythonhosted.org/packages/09/f5/11b2da82f2c52365a5b760a4e944ae50a89cf5fb207024b7853615254584/aiohttp-3.11.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b", size = 1737839 }, - { url = "https://files.pythonhosted.org/packages/03/7f/145e23fe0a4c45b256f14c3268ada5497d487786334721ae8a0c818ee516/aiohttp-3.11.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be", size = 1695833 }, - { url = "https://files.pythonhosted.org/packages/1c/78/627dba6ee9fb9439e2e29b521adb1135877a9c7b54811fec5c46e59f2fc8/aiohttp-3.11.14-cp312-cp312-win32.whl", hash = "sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f", size = 412185 }, - { url = "https://files.pythonhosted.org/packages/3f/5f/1737cf6fcf0524693a4aeff8746530b65422236761e7bfdd79c6d2ce2e1c/aiohttp-3.11.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c", size = 438526 }, - { url = "https://files.pythonhosted.org/packages/c5/8e/d7f353c5aaf9f868ab382c3d3320dc6efaa639b6b30d5a686bed83196115/aiohttp-3.11.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50", size = 698774 }, - { url = "https://files.pythonhosted.org/packages/d5/52/097b98d50f8550883f7d360c6cd4e77668c7442038671bb4b349ced95066/aiohttp-3.11.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965", size = 461443 }, - { url = "https://files.pythonhosted.org/packages/2b/5c/19c84bb5796be6ca4fd1432012cfd5f88ec02c8b9e0357cdecc48ff2c4fd/aiohttp-3.11.14-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228", size = 453717 }, - { url = "https://files.pythonhosted.org/packages/6d/08/61c2b6f04a4e1329c82ffda53dd0ac4b434681dc003578a1237d318be885/aiohttp-3.11.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5", size = 1666559 }, - { url = "https://files.pythonhosted.org/packages/7c/22/913ad5b4b979ecf69300869551c210b2eb8c22ca4cd472824a1425479775/aiohttp-3.11.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db", size = 1721701 }, - { url = "https://files.pythonhosted.org/packages/5b/ea/0ee73ea764b2e1f769c1caf59f299ac017b50632ceaa809960385b68e735/aiohttp-3.11.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0", size = 1779094 }, - { url = "https://files.pythonhosted.org/packages/e6/ca/6ce3da7c3295e0655b3404a309c7002099ca3619aeb04d305cedc77a0a14/aiohttp-3.11.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91", size = 1678406 }, - { url = "https://files.pythonhosted.org/packages/b1/b1/3a13ed54dc6bb57057cc94fec2a742f24a89885cfa84b71930826af40f5f/aiohttp-3.11.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d", size = 1604446 }, - { url = "https://files.pythonhosted.org/packages/00/21/fc9f327a121ff0be32ed4ec3ccca65f420549bf3a646b02f8534ba5fe86d/aiohttp-3.11.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734", size = 1619129 }, - { url = "https://files.pythonhosted.org/packages/56/5b/1a4a45b1f6f95b998c49d3d1e7763a75eeff29f2f5ec7e06d94a359e7d97/aiohttp-3.11.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058", size = 1657924 }, - { url = "https://files.pythonhosted.org/packages/2f/2d/b6211aa0664b87c93fda2f2f60d5211be514a2d5b4935e1286d54b8aa28d/aiohttp-3.11.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73", size = 1617501 }, - { url = "https://files.pythonhosted.org/packages/fa/3d/d46ccb1f361a1275a078bfc1509bcd6dc6873e22306d10baa61bc77a0dfc/aiohttp-3.11.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33", size = 1684211 }, - { url = "https://files.pythonhosted.org/packages/2d/e2/71d12ee6268ad3bf4ee82a4f2fc7f0b943f480296cb6f61af1afe05b8d24/aiohttp-3.11.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49", size = 1715797 }, - { url = "https://files.pythonhosted.org/packages/8d/a7/d0de521dc5ca6e8c766f8d1f373c859925f10b2a96455b16107c1e9b2d60/aiohttp-3.11.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647", size = 1673682 }, - { url = "https://files.pythonhosted.org/packages/f0/86/5c075ebeca7063a49a0da65a4e0aa9e49d741aca9a2fe9552d86906e159b/aiohttp-3.11.14-cp313-cp313-win32.whl", hash = "sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6", size = 411014 }, - { url = 
"https://files.pythonhosted.org/packages/4a/e0/2f9e77ef2d4a1dbf05f40b7edf1e1ce9be72bdbe6037cf1db1712b455e3e/aiohttp-3.11.14-cp313-cp313-win_amd64.whl", hash = "sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3", size = 436964 }, -] - -[[package]] -name = "aiosignal" -version = "1.3.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "frozenlist" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597 }, -] - -[[package]] -name = "attrs" -version = "25.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815 }, -] - -[[package]] -name = "certifi" -version = "2025.1.31" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, - { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, - { url = 
"https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, - { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, - { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, - { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, - { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, - { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, - { url = 
"https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, - { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, - { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, - { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, - { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, - { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, - { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, - { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, - { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, - { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, - { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, - { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url 
= "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "datasets" -version = "3.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohttp" }, - { name = "dill" }, - { name = "filelock" }, - { name = "fsspec", extra = ["http"] }, - { name = "huggingface-hub" }, - { name = "multiprocess" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "pandas" }, - { name = "pyarrow" }, - { name = "pyyaml" }, - { name = "requests" }, - { name = "tqdm" }, - { name = "xxhash" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/99/4b/40cda74a4e0e58450b0c85a737e134ab5df65e6f5c33c5e175db5d6a5227/datasets-3.4.1.tar.gz", hash = "sha256:e23968da79bc014ef9f7540eeb7771c6180eae82c86ebcfcc10535a03caf08b5", size = 566559 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/44/5de560a2625d31801895fb2663693df210c6465960d61a99192caa9afd63/datasets-3.4.1-py3-none-any.whl", hash = "sha256:b91cf257bd64132fa9d953dd4768ab6d63205597301f132a74271cfcce8b5dd3", size = 487392 }, -] - -[[package]] -name = "dill" -version = "0.3.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7", size = 116252 }, -] - -[[package]] -name = "filelock" -version = "3.18.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215 }, -] - -[[package]] -name = "frozenlist" -version = "1.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8f/ed/0f4cec13a93c02c47ec32d81d11c0c1efbadf4a471e3f3ce7cad366cbbd3/frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817", size = 39930 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/79/73/fa6d1a96ab7fd6e6d1c3500700963eab46813847f01ef0ccbaa726181dd5/frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21", size = 94026 }, - { url = "https://files.pythonhosted.org/packages/ab/04/ea8bf62c8868b8eada363f20ff1b647cf2e93377a7b284d36062d21d81d1/frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d", size = 54150 }, - { url = "https://files.pythonhosted.org/packages/d0/9a/8e479b482a6f2070b26bda572c5e6889bb3ba48977e81beea35b5ae13ece/frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e", size = 51927 }, - { url = "https://files.pythonhosted.org/packages/e3/12/2aad87deb08a4e7ccfb33600871bbe8f0e08cb6d8224371387f3303654d7/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a", size = 282647 }, - { url = "https://files.pythonhosted.org/packages/77/f2/07f06b05d8a427ea0060a9cef6e63405ea9e0d761846b95ef3fb3be57111/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a", size = 289052 }, - { url = "https://files.pythonhosted.org/packages/bd/9f/8bf45a2f1cd4aa401acd271b077989c9267ae8463e7c8b1eb0d3f561b65e/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee", size = 291719 }, - { url = "https://files.pythonhosted.org/packages/41/d1/1f20fd05a6c42d3868709b7604c9f15538a29e4f734c694c6bcfc3d3b935/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6", size = 267433 }, - { url = "https://files.pythonhosted.org/packages/af/f2/64b73a9bb86f5a89fb55450e97cd5c1f84a862d4ff90d9fd1a73ab0f64a5/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e", size = 283591 }, - { url = "https://files.pythonhosted.org/packages/29/e2/ffbb1fae55a791fd6c2938dd9ea779509c977435ba3940b9f2e8dc9d5316/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9", size = 273249 }, - { url = "https://files.pythonhosted.org/packages/2e/6e/008136a30798bb63618a114b9321b5971172a5abddff44a100c7edc5ad4f/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039", size = 271075 }, - { url = "https://files.pythonhosted.org/packages/ae/f0/4e71e54a026b06724cec9b6c54f0b13a4e9e298cc8db0f82ec70e151f5ce/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784", size = 285398 }, - { url = "https://files.pythonhosted.org/packages/4d/36/70ec246851478b1c0b59f11ef8ade9c482ff447c1363c2bd5fad45098b12/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631", size = 294445 }, - { url = "https://files.pythonhosted.org/packages/37/e0/47f87544055b3349b633a03c4d94b405956cf2437f4ab46d0928b74b7526/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f", size = 280569 }, - { url = "https://files.pythonhosted.org/packages/f9/7c/490133c160fb6b84ed374c266f42800e33b50c3bbab1652764e6e1fc498a/frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8", size = 44721 }, - { url = 
"https://files.pythonhosted.org/packages/b1/56/4e45136ffc6bdbfa68c29ca56ef53783ef4c2fd395f7cbf99a2624aa9aaa/frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f", size = 51329 }, - { url = "https://files.pythonhosted.org/packages/da/3b/915f0bca8a7ea04483622e84a9bd90033bab54bdf485479556c74fd5eaf5/frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953", size = 91538 }, - { url = "https://files.pythonhosted.org/packages/c7/d1/a7c98aad7e44afe5306a2b068434a5830f1470675f0e715abb86eb15f15b/frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0", size = 52849 }, - { url = "https://files.pythonhosted.org/packages/3a/c8/76f23bf9ab15d5f760eb48701909645f686f9c64fbb8982674c241fbef14/frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2", size = 50583 }, - { url = "https://files.pythonhosted.org/packages/1f/22/462a3dd093d11df623179d7754a3b3269de3b42de2808cddef50ee0f4f48/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f", size = 265636 }, - { url = "https://files.pythonhosted.org/packages/80/cf/e075e407fc2ae7328155a1cd7e22f932773c8073c1fc78016607d19cc3e5/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608", size = 270214 }, - { url = "https://files.pythonhosted.org/packages/a1/58/0642d061d5de779f39c50cbb00df49682832923f3d2ebfb0fedf02d05f7f/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b", size = 273905 }, - { url = "https://files.pythonhosted.org/packages/ab/66/3fe0f5f8f2add5b4ab7aa4e199f767fd3b55da26e3ca4ce2cc36698e50c4/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840", size = 250542 }, - { url = "https://files.pythonhosted.org/packages/f6/b8/260791bde9198c87a465224e0e2bb62c4e716f5d198fc3a1dacc4895dbd1/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439", size = 267026 }, - { url = "https://files.pythonhosted.org/packages/2e/a4/3d24f88c527f08f8d44ade24eaee83b2627793fa62fa07cbb7ff7a2f7d42/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de", size = 257690 }, - { url = "https://files.pythonhosted.org/packages/de/9a/d311d660420b2beeff3459b6626f2ab4fb236d07afbdac034a4371fe696e/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641", size = 253893 }, - { url = "https://files.pythonhosted.org/packages/c6/23/e491aadc25b56eabd0f18c53bb19f3cdc6de30b2129ee0bc39cd387cd560/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e", size = 267006 }, - { url = 
"https://files.pythonhosted.org/packages/08/c4/ab918ce636a35fb974d13d666dcbe03969592aeca6c3ab3835acff01f79c/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9", size = 276157 }, - { url = "https://files.pythonhosted.org/packages/c0/29/3b7a0bbbbe5a34833ba26f686aabfe982924adbdcafdc294a7a129c31688/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03", size = 264642 }, - { url = "https://files.pythonhosted.org/packages/ab/42/0595b3dbffc2e82d7fe658c12d5a5bafcd7516c6bf2d1d1feb5387caa9c1/frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c", size = 44914 }, - { url = "https://files.pythonhosted.org/packages/17/c4/b7db1206a3fea44bf3b838ca61deb6f74424a8a5db1dd53ecb21da669be6/frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28", size = 51167 }, - { url = "https://files.pythonhosted.org/packages/c6/c8/a5be5b7550c10858fcf9b0ea054baccab474da77d37f1e828ce043a3a5d4/frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3", size = 11901 }, -] - -[[package]] -name = "fsspec" -version = "2024.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/11/de70dee31455c546fbc88301971ec03c328f3d1138cfba14263f651e9551/fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f", size = 291600 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/86/5486b0188d08aa643e127774a99bac51ffa6cf343e3deb0583956dca5b22/fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2", size = 183862 }, -] - -[package.optional-dependencies] -http = [ - { name = "aiohttp" }, -] - -[[package]] -name = "huggingface-hub" -version = "0.29.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "packaging" }, - { name = "pyyaml" }, - { name = "requests" }, - { name = "tqdm" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e5/f9/851f34b02970e8143d41d4001b2d49e54ef113f273902103823b8bc95ada/huggingface_hub-0.29.3.tar.gz", hash = "sha256:64519a25716e0ba382ba2d3fb3ca082e7c7eb4a2fc634d200e8380006e0760e5", size = 390123 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/0c/37d380846a2e5c9a3c6a73d26ffbcfdcad5fc3eacf42fdf7cff56f2af634/huggingface_hub-0.29.3-py3-none-any.whl", hash = "sha256:0b25710932ac649c08cdbefa6c6ccb8e88eef82927cacdb048efb726429453aa", size = 468997 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "multidict" -version = "6.2.0" -source = { registry = "https://pypi.org/simple" } -sdist 
= { url = "https://files.pythonhosted.org/packages/82/4a/7874ca44a1c9b23796c767dd94159f6c17e31c0e7d090552a1c623247d82/multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8", size = 71066 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/e2/0153a8db878aef9b2397be81e62cbc3b32ca9b94e0f700b103027db9d506/multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b", size = 49204 }, - { url = "https://files.pythonhosted.org/packages/bb/9d/5ccb3224a976d1286f360bb4e89e67b7cdfb87336257fc99be3c17f565d7/multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4", size = 29807 }, - { url = "https://files.pythonhosted.org/packages/62/32/ef20037f51b84b074a89bab5af46d4565381c3f825fc7cbfc19c1ee156be/multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44", size = 30000 }, - { url = "https://files.pythonhosted.org/packages/97/81/b0a7560bfc3ec72606232cd7e60159e09b9cf29e66014d770c1315868fa2/multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd", size = 131820 }, - { url = "https://files.pythonhosted.org/packages/49/3b/768bfc0e41179fbccd3a22925329a11755b7fdd53bec66dbf6b8772f0bce/multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e", size = 136272 }, - { url = "https://files.pythonhosted.org/packages/71/ac/fd2be3fe98ff54e7739448f771ba730d42036de0870737db9ae34bb8efe9/multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c", size = 135233 }, - { url = "https://files.pythonhosted.org/packages/93/76/1657047da771315911a927b364a32dafce4135b79b64208ce4ac69525c56/multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87", size = 132861 }, - { url = "https://files.pythonhosted.org/packages/19/a5/9f07ffb9bf68b8aaa406c2abee27ad87e8b62a60551587b8e59ee91aea84/multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29", size = 122166 }, - { url = "https://files.pythonhosted.org/packages/95/23/b5ce3318d9d6c8f105c3679510f9d7202980545aad8eb4426313bd8da3ee/multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd", size = 136052 }, - { url = "https://files.pythonhosted.org/packages/ce/5c/02cffec58ffe120873dce520af593415b91cc324be0345f534ad3637da4e/multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8", size = 130094 }, - { url = "https://files.pythonhosted.org/packages/49/f3/3b19a83f4ebf53a3a2a0435f3e447aa227b242ba3fd96a92404b31fb3543/multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df", size = 140962 }, - { url = "https://files.pythonhosted.org/packages/cc/1a/c916b54fb53168c24cb6a3a0795fd99d0a59a0ea93fa9f6edeff5565cb20/multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", 
hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d", size = 138082 }, - { url = "https://files.pythonhosted.org/packages/ef/1a/dcb7fb18f64b3727c61f432c1e1a0d52b3924016124e4bbc8a7d2e4fa57b/multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b", size = 136019 }, - { url = "https://files.pythonhosted.org/packages/fb/02/7695485375106f5c542574f70e1968c391f86fa3efc9f1fd76aac0af7237/multidict-6.2.0-cp312-cp312-win32.whl", hash = "sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626", size = 26676 }, - { url = "https://files.pythonhosted.org/packages/3c/f5/f147000fe1f4078160157b15b0790fff0513646b0f9b7404bf34007a9b44/multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c", size = 28899 }, - { url = "https://files.pythonhosted.org/packages/a4/6c/5df5590b1f9a821154589df62ceae247537b01ab26b0aa85997c35ca3d9e/multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80", size = 49151 }, - { url = "https://files.pythonhosted.org/packages/d5/ca/c917fbf1be989cd7ea9caa6f87e9c33844ba8d5fbb29cd515d4d2833b84c/multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16", size = 29803 }, - { url = "https://files.pythonhosted.org/packages/22/19/d97086fc96f73acf36d4dbe65c2c4175911969df49c4e94ef082be59d94e/multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e", size = 29947 }, - { url = "https://files.pythonhosted.org/packages/e3/3b/203476b6e915c3f51616d5f87230c556e2f24b168c14818a3d8dae242b1b/multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817", size = 130369 }, - { url = "https://files.pythonhosted.org/packages/c6/4f/67470007cf03b2bb6df8ae6d716a8eeb0a7d19e0c8dba4e53fa338883bca/multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc", size = 135231 }, - { url = "https://files.pythonhosted.org/packages/6d/f5/7a5ce64dc9a3fecc7d67d0b5cb9c262c67e0b660639e5742c13af63fd80f/multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1", size = 133634 }, - { url = "https://files.pythonhosted.org/packages/05/93/ab2931907e318c0437a4cd156c9cfff317ffb33d99ebbfe2d64200a870f7/multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844", size = 131349 }, - { url = "https://files.pythonhosted.org/packages/54/aa/ab8eda83a6a85f5b4bb0b1c28e62b18129b14519ef2e0d4cfd5f360da73c/multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48", size = 120861 }, - { url = "https://files.pythonhosted.org/packages/15/2f/7d08ea7c5d9f45786893b4848fad59ec8ea567367d4234691a721e4049a1/multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0", size = 134611 }, - { url = 
"https://files.pythonhosted.org/packages/8b/07/387047bb1eac563981d397a7f85c75b306df1fff3c20b90da5a6cf6e487e/multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f", size = 128955 }, - { url = "https://files.pythonhosted.org/packages/8d/6e/7ae18f764a5282c2d682f1c90c6b2a0f6490327730170139a7a63bf3bb20/multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de", size = 139759 }, - { url = "https://files.pythonhosted.org/packages/b6/f4/c1b3b087b9379b9e56229bcf6570b9a963975c205a5811ac717284890598/multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02", size = 136426 }, - { url = "https://files.pythonhosted.org/packages/a2/0e/ef7b39b161ffd40f9e25dd62e59644b2ccaa814c64e9573f9bc721578419/multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d", size = 134648 }, - { url = "https://files.pythonhosted.org/packages/37/5c/7905acd0ca411c97bcae62ab167d9922f0c5a1d316b6d3af875d4bda3551/multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e", size = 26680 }, - { url = "https://files.pythonhosted.org/packages/89/36/96b071d1dad6ac44fe517e4250329e753787bb7a63967ef44bb9b3a659f6/multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2", size = 28942 }, - { url = "https://files.pythonhosted.org/packages/f5/05/d686cd2a12d648ecd434675ee8daa2901a80f477817e89ab3b160de5b398/multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7", size = 50807 }, - { url = "https://files.pythonhosted.org/packages/4c/1f/c7db5aac8fea129fa4c5a119e3d279da48d769138ae9624d1234aa01a06f/multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b", size = 30474 }, - { url = "https://files.pythonhosted.org/packages/e5/f1/1fb27514f4d73cea165429dcb7d90cdc4a45445865832caa0c50dd545420/multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e", size = 30841 }, - { url = "https://files.pythonhosted.org/packages/d6/6b/9487169e549a23c8958edbb332afaf1ab55d61f0c03cb758ee07ff8f74fb/multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025", size = 148658 }, - { url = "https://files.pythonhosted.org/packages/d7/22/79ebb2e4f70857c94999ce195db76886ae287b1b6102da73df24dcad4903/multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd", size = 151988 }, - { url = "https://files.pythonhosted.org/packages/49/5d/63b17f3c1a2861587d26705923a94eb6b2600e5222d6b0d513bce5a78720/multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7", size = 148432 }, - { url = "https://files.pythonhosted.org/packages/a3/22/55204eec45c4280fa431c11494ad64d6da0dc89af76282fc6467432360a0/multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af", size = 143161 }, - { url = "https://files.pythonhosted.org/packages/97/e6/202b2cf5af161228767acab8bc49e73a91f4a7de088c9c71f3c02950a030/multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331", size = 136820 }, - { url = "https://files.pythonhosted.org/packages/7d/16/dbedae0e94c7edc48fddef0c39483f2313205d9bc566fd7f11777b168616/multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c", size = 150875 }, - { url = "https://files.pythonhosted.org/packages/f3/04/38ccf25d4bf8beef76a22bad7d9833fd088b4594c9765fe6fede39aa6c89/multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b", size = 142050 }, - { url = "https://files.pythonhosted.org/packages/9e/89/4f6b43386e7b79a4aad560d751981a0a282a1943c312ac72f940d7cf8f9f/multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151", size = 154117 }, - { url = "https://files.pythonhosted.org/packages/24/e3/3dde5b193f86d30ad6400bd50e116b0df1da3f0c7d419661e3bd79e5ad86/multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019", size = 149408 }, - { url = "https://files.pythonhosted.org/packages/df/b2/ec1e27e8e3da12fcc9053e1eae2f6b50faa8708064d83ea25aa7fb77ffd2/multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547", size = 145767 }, - { url = "https://files.pythonhosted.org/packages/3a/8e/c07a648a9d592fa9f3a19d1c7e1c7738ba95aff90db967a5a09cff1e1f37/multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc", size = 28950 }, - { url = "https://files.pythonhosted.org/packages/dc/a9/bebb5485b94d7c09831638a4df9a1a924c32431a750723f0bf39cd16a787/multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44", size = 32001 }, - { url = "https://files.pythonhosted.org/packages/9c/fd/b247aec6add5601956d440488b7f23151d8343747e82c038af37b28d6098/multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530", size = 10266 }, -] - -[[package]] -name = "multiprocess" -version = "0.70.16" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dill" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1", size = 1772603 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02", size = 134824 }, - { url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a", size = 143519 }, - { url = 
"https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e", size = 146741 }, - { url = "https://files.pythonhosted.org/packages/ea/89/38df130f2c799090c978b366cfdf5b96d08de5b29a4a293df7f7429fa50b/multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435", size = 132628 }, - { url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351 }, -] - -[[package]] -name = "numpy" -version = "2.2.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/78/31103410a57bc2c2b93a3597340a8119588571f6a4539067546cb9a0bfac/numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f", size = 20270701 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/30/182db21d4f2a95904cec1a6f779479ea1ac07c0647f064dea454ec650c42/numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4", size = 20947156 }, - { url = "https://files.pythonhosted.org/packages/24/6d/9483566acfbda6c62c6bc74b6e981c777229d2af93c8eb2469b26ac1b7bc/numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854", size = 14133092 }, - { url = "https://files.pythonhosted.org/packages/27/f6/dba8a258acbf9d2bed2525cdcbb9493ef9bae5199d7a9cb92ee7e9b2aea6/numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24", size = 5163515 }, - { url = "https://files.pythonhosted.org/packages/62/30/82116199d1c249446723c68f2c9da40d7f062551036f50b8c4caa42ae252/numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee", size = 6696558 }, - { url = "https://files.pythonhosted.org/packages/0e/b2/54122b3c6df5df3e87582b2e9430f1bdb63af4023c739ba300164c9ae503/numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba", size = 14084742 }, - { url = "https://files.pythonhosted.org/packages/02/e2/e2cbb8d634151aab9528ef7b8bab52ee4ab10e076509285602c2a3a686e0/numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592", size = 16134051 }, - { url = "https://files.pythonhosted.org/packages/8e/21/efd47800e4affc993e8be50c1b768de038363dd88865920439ef7b422c60/numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb", size = 15578972 }, - { url = "https://files.pythonhosted.org/packages/04/1e/f8bb88f6157045dd5d9b27ccf433d016981032690969aa5c19e332b138c0/numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f", size = 17898106 }, - { url = "https://files.pythonhosted.org/packages/2b/93/df59a5a3897c1f036ae8ff845e45f4081bb06943039ae28a3c1c7c780f22/numpy-2.2.4-cp312-cp312-win32.whl", hash = 
"sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00", size = 6311190 }, - { url = "https://files.pythonhosted.org/packages/46/69/8c4f928741c2a8efa255fdc7e9097527c6dc4e4df147e3cadc5d9357ce85/numpy-2.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146", size = 12644305 }, - { url = "https://files.pythonhosted.org/packages/2a/d0/bd5ad792e78017f5decfb2ecc947422a3669a34f775679a76317af671ffc/numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7", size = 20933623 }, - { url = "https://files.pythonhosted.org/packages/c3/bc/2b3545766337b95409868f8e62053135bdc7fa2ce630aba983a2aa60b559/numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0", size = 14148681 }, - { url = "https://files.pythonhosted.org/packages/6a/70/67b24d68a56551d43a6ec9fe8c5f91b526d4c1a46a6387b956bf2d64744e/numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392", size = 5148759 }, - { url = "https://files.pythonhosted.org/packages/1c/8b/e2fc8a75fcb7be12d90b31477c9356c0cbb44abce7ffb36be39a0017afad/numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc", size = 6683092 }, - { url = "https://files.pythonhosted.org/packages/13/73/41b7b27f169ecf368b52533edb72e56a133f9e86256e809e169362553b49/numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298", size = 14081422 }, - { url = "https://files.pythonhosted.org/packages/4b/04/e208ff3ae3ddfbafc05910f89546382f15a3f10186b1f56bd99f159689c2/numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7", size = 16132202 }, - { url = "https://files.pythonhosted.org/packages/fe/bc/2218160574d862d5e55f803d88ddcad88beff94791f9c5f86d67bd8fbf1c/numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6", size = 15573131 }, - { url = "https://files.pythonhosted.org/packages/a5/78/97c775bc4f05abc8a8426436b7cb1be806a02a2994b195945600855e3a25/numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd", size = 17894270 }, - { url = "https://files.pythonhosted.org/packages/b9/eb/38c06217a5f6de27dcb41524ca95a44e395e6a1decdc0c99fec0832ce6ae/numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c", size = 6308141 }, - { url = "https://files.pythonhosted.org/packages/52/17/d0dd10ab6d125c6d11ffb6dfa3423c3571befab8358d4f85cd4471964fcd/numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3", size = 12636885 }, - { url = "https://files.pythonhosted.org/packages/fa/e2/793288ede17a0fdc921172916efb40f3cbc2aa97e76c5c84aba6dc7e8747/numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8", size = 20961829 }, - { url = "https://files.pythonhosted.org/packages/3a/75/bb4573f6c462afd1ea5cbedcc362fe3e9bdbcc57aefd37c681be1155fbaa/numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39", size = 14161419 }, - { url = "https://files.pythonhosted.org/packages/03/68/07b4cd01090ca46c7a336958b413cdbe75002286295f2addea767b7f16c9/numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd", size = 5196414 }, - { url = "https://files.pythonhosted.org/packages/a5/fd/d4a29478d622fedff5c4b4b4cedfc37a00691079623c0575978d2446db9e/numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0", size = 6709379 }, - { url = "https://files.pythonhosted.org/packages/41/78/96dddb75bb9be730b87c72f30ffdd62611aba234e4e460576a068c98eff6/numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960", size = 14051725 }, - { url = "https://files.pythonhosted.org/packages/00/06/5306b8199bffac2a29d9119c11f457f6c7d41115a335b78d3f86fad4dbe8/numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8", size = 16101638 }, - { url = "https://files.pythonhosted.org/packages/fa/03/74c5b631ee1ded596945c12027649e6344614144369fd3ec1aaced782882/numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc", size = 15571717 }, - { url = "https://files.pythonhosted.org/packages/cb/dc/4fc7c0283abe0981e3b89f9b332a134e237dd476b0c018e1e21083310c31/numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff", size = 17879998 }, - { url = "https://files.pythonhosted.org/packages/e5/2b/878576190c5cfa29ed896b518cc516aecc7c98a919e20706c12480465f43/numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286", size = 6366896 }, - { url = "https://files.pythonhosted.org/packages/3e/05/eb7eec66b95cf697f08c754ef26c3549d03ebd682819f794cb039574a0a6/numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d", size = 12739119 }, -] - -[[package]] -name = "packaging" -version = "24.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, -] - -[[package]] -name = "pandas" -version = "2.2.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, - { name = "python-dateutil" }, - { name = "pytz" }, - { name = "tzdata" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 }, - { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 }, - { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 }, - { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 }, - { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 }, - { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 }, - { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 }, - { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 }, - { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 }, - { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 }, - { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 }, - { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 }, - { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 }, - { url = 
"https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 }, - { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 }, - { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 }, - { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 }, - { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 }, - { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 }, - { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 }, -] - -[[package]] -name = "propcache" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/76/f941e63d55c0293ff7829dd21e7cf1147e90a526756869a9070f287a68c9/propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5", size = 42722 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/2c/921f15dc365796ec23975b322b0078eae72995c7b4d49eba554c6a308d70/propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e", size = 79867 }, - { url = "https://files.pythonhosted.org/packages/11/a5/4a6cc1a559d1f2fb57ea22edc4245158cdffae92f7f92afcee2913f84417/propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af", size = 46109 }, - { url = "https://files.pythonhosted.org/packages/e1/6d/28bfd3af3a567ad7d667348e7f46a520bda958229c4d545ba138a044232f/propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5", size = 45635 }, - { url = "https://files.pythonhosted.org/packages/73/20/d75b42eaffe5075eac2f4e168f6393d21c664c91225288811d85451b2578/propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b", size = 242159 }, - { url = 
"https://files.pythonhosted.org/packages/a5/fb/4b537dd92f9fd4be68042ec51c9d23885ca5fafe51ec24c58d9401034e5f/propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667", size = 248163 }, - { url = "https://files.pythonhosted.org/packages/e7/af/8a9db04ac596d531ca0ef7dde518feaadfcdabef7b17d6a5ec59ee3effc2/propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7", size = 248794 }, - { url = "https://files.pythonhosted.org/packages/9d/c4/ecfc988879c0fd9db03228725b662d76cf484b6b46f7e92fee94e4b52490/propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7", size = 243912 }, - { url = "https://files.pythonhosted.org/packages/04/a2/298dd27184faa8b7d91cc43488b578db218b3cc85b54d912ed27b8c5597a/propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf", size = 229402 }, - { url = "https://files.pythonhosted.org/packages/be/0d/efe7fec316ca92dbf4bc4a9ba49ca889c43ca6d48ab1d6fa99fc94e5bb98/propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138", size = 226896 }, - { url = "https://files.pythonhosted.org/packages/60/63/72404380ae1d9c96d96e165aa02c66c2aae6072d067fc4713da5cde96762/propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86", size = 221447 }, - { url = "https://files.pythonhosted.org/packages/9d/18/b8392cab6e0964b67a30a8f4dadeaff64dc7022b5a34bb1d004ea99646f4/propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d", size = 222440 }, - { url = "https://files.pythonhosted.org/packages/6f/be/105d9ceda0f97eff8c06bac1673448b2db2a497444de3646464d3f5dc881/propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e", size = 234104 }, - { url = "https://files.pythonhosted.org/packages/cb/c9/f09a4ec394cfcce4053d8b2a04d622b5f22d21ba9bb70edd0cad061fa77b/propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64", size = 239086 }, - { url = "https://files.pythonhosted.org/packages/ea/aa/96f7f9ed6def82db67c972bdb7bd9f28b95d7d98f7e2abaf144c284bf609/propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c", size = 230991 }, - { url = "https://files.pythonhosted.org/packages/5a/11/bee5439de1307d06fad176f7143fec906e499c33d7aff863ea8428b8e98b/propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d", size = 40337 }, - { url = "https://files.pythonhosted.org/packages/e4/17/e5789a54a0455a61cb9efc4ca6071829d992220c2998a27c59aeba749f6f/propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57", size = 44404 }, - { url = "https://files.pythonhosted.org/packages/3a/0f/a79dd23a0efd6ee01ab0dc9750d8479b343bfd0c73560d59d271eb6a99d4/propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568", size = 77287 }, - { url = "https://files.pythonhosted.org/packages/b8/51/76675703c90de38ac75adb8deceb3f3ad99b67ff02a0fa5d067757971ab8/propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9", size = 44923 }, - { url = "https://files.pythonhosted.org/packages/01/9b/fd5ddbee66cf7686e73c516227c2fd9bf471dbfed0f48329d095ea1228d3/propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767", size = 44325 }, - { url = "https://files.pythonhosted.org/packages/13/1c/6961f11eb215a683b34b903b82bde486c606516c1466bf1fa67f26906d51/propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8", size = 225116 }, - { url = "https://files.pythonhosted.org/packages/ef/ea/f8410c40abcb2e40dffe9adeed017898c930974650a63e5c79b886aa9f73/propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0", size = 229905 }, - { url = "https://files.pythonhosted.org/packages/ef/5a/a9bf90894001468bf8e6ea293bb00626cc9ef10f8eb7996e9ec29345c7ed/propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d", size = 233221 }, - { url = "https://files.pythonhosted.org/packages/dd/ce/fffdddd9725b690b01d345c1156b4c2cc6dca09ab5c23a6d07b8f37d6e2f/propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05", size = 227627 }, - { url = "https://files.pythonhosted.org/packages/58/ae/45c89a5994a334735a3032b48e8e4a98c05d9536ddee0719913dc27da548/propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe", size = 214217 }, - { url = "https://files.pythonhosted.org/packages/01/84/bc60188c3290ff8f5f4a92b9ca2d93a62e449c8daf6fd11ad517ad136926/propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1", size = 212921 }, - { url = "https://files.pythonhosted.org/packages/14/b3/39d60224048feef7a96edabb8217dc3f75415457e5ebbef6814f8b2a27b5/propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92", size = 208200 }, - { url = "https://files.pythonhosted.org/packages/9d/b3/0a6720b86791251273fff8a01bc8e628bc70903513bd456f86cde1e1ef84/propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787", size = 208400 }, - { url = "https://files.pythonhosted.org/packages/e9/4f/bb470f3e687790547e2e78105fb411f54e0cdde0d74106ccadd2521c6572/propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545", size = 218116 }, - { url = "https://files.pythonhosted.org/packages/34/71/277f7f9add469698ac9724c199bfe06f85b199542121a71f65a80423d62a/propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e", size = 222911 }, - { url = 
"https://files.pythonhosted.org/packages/92/e3/a7b9782aef5a2fc765b1d97da9ec7aed2f25a4e985703608e73232205e3f/propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626", size = 216563 }, - { url = "https://files.pythonhosted.org/packages/ab/76/0583ca2c551aa08ffcff87b2c6849c8f01c1f6fb815a5226f0c5c202173e/propcache-0.3.0-cp313-cp313-win32.whl", hash = "sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374", size = 39763 }, - { url = "https://files.pythonhosted.org/packages/80/ec/c6a84f9a36f608379b95f0e786c111d5465926f8c62f12be8cdadb02b15c/propcache-0.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a", size = 43650 }, - { url = "https://files.pythonhosted.org/packages/ee/95/7d32e3560f5bf83fc2f2a4c1b0c181d327d53d5f85ebd045ab89d4d97763/propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf", size = 82140 }, - { url = "https://files.pythonhosted.org/packages/86/89/752388f12e6027a5e63f5d075f15291ded48e2d8311314fff039da5a9b11/propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0", size = 47296 }, - { url = "https://files.pythonhosted.org/packages/1b/4c/b55c98d586c69180d3048984a57a5ea238bdeeccf82dbfcd598e935e10bb/propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829", size = 46724 }, - { url = "https://files.pythonhosted.org/packages/0f/b6/67451a437aed90c4e951e320b5b3d7eb584ade1d5592f6e5e8f678030989/propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa", size = 291499 }, - { url = "https://files.pythonhosted.org/packages/ee/ff/e4179facd21515b24737e1e26e02615dfb5ed29416eed4cf5bc6ac5ce5fb/propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6", size = 293911 }, - { url = "https://files.pythonhosted.org/packages/76/8d/94a8585992a064a23bd54f56c5e58c3b8bf0c0a06ae10e56f2353ae16c3d/propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db", size = 293301 }, - { url = "https://files.pythonhosted.org/packages/b0/b8/2c860c92b4134f68c7716c6f30a0d723973f881c32a6d7a24c4ddca05fdf/propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54", size = 281947 }, - { url = "https://files.pythonhosted.org/packages/cd/72/b564be7411b525d11757b713c757c21cd4dc13b6569c3b2b8f6d3c96fd5e/propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121", size = 268072 }, - { url = "https://files.pythonhosted.org/packages/37/68/d94649e399e8d7fc051e5a4f2334efc567993525af083db145a70690a121/propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e", size = 275190 }, - { url = "https://files.pythonhosted.org/packages/d8/3c/446e125f5bbbc1922964dd67cb541c01cdb678d811297b79a4ff6accc843/propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash 
= "sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e", size = 254145 }, - { url = "https://files.pythonhosted.org/packages/f4/80/fd3f741483dc8e59f7ba7e05eaa0f4e11677d7db2077522b92ff80117a2a/propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a", size = 257163 }, - { url = "https://files.pythonhosted.org/packages/dc/cf/6292b5ce6ed0017e6a89024a827292122cc41b6259b30ada0c6732288513/propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac", size = 280249 }, - { url = "https://files.pythonhosted.org/packages/e8/f0/fd9b8247b449fe02a4f96538b979997e229af516d7462b006392badc59a1/propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e", size = 288741 }, - { url = "https://files.pythonhosted.org/packages/64/71/cf831fdc2617f86cfd7f414cfc487d018e722dac8acc098366ce9bba0941/propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf", size = 277061 }, - { url = "https://files.pythonhosted.org/packages/42/78/9432542a35d944abeca9e02927a0de38cd7a298466d8ffa171536e2381c3/propcache-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863", size = 42252 }, - { url = "https://files.pythonhosted.org/packages/6f/45/960365f4f8978f48ebb56b1127adf33a49f2e69ecd46ac1f46d6cf78a79d/propcache-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46", size = 46425 }, - { url = "https://files.pythonhosted.org/packages/b5/35/6c4c6fc8774a9e3629cd750dc24a7a4fb090a25ccd5c3246d127b70f9e22/propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043", size = 12101 }, -] - -[[package]] -name = "pyarrow" -version = "19.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/09/a9046344212690f0632b9c709f9bf18506522feb333c894d0de81d62341a/pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e", size = 1129437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b4/94e828704b050e723f67d67c3535cf7076c7432cd4cf046e4bb3b96a9c9d/pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b", size = 30670749 }, - { url = "https://files.pythonhosted.org/packages/7e/3b/4692965e04bb1df55e2c314c4296f1eb12b4f3052d4cf43d29e076aedf66/pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294", size = 32128007 }, - { url = "https://files.pythonhosted.org/packages/22/f7/2239af706252c6582a5635c35caa17cb4d401cd74a87821ef702e3888957/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14", size = 41144566 }, - { url = "https://files.pythonhosted.org/packages/fb/e3/c9661b2b2849cfefddd9fd65b64e093594b231b472de08ff658f76c732b2/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34", size = 42202991 }, - { url = 
"https://files.pythonhosted.org/packages/fe/4f/a2c0ed309167ef436674782dfee4a124570ba64299c551e38d3fdaf0a17b/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6", size = 40507986 }, - { url = "https://files.pythonhosted.org/packages/27/2e/29bb28a7102a6f71026a9d70d1d61df926887e36ec797f2e6acfd2dd3867/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832", size = 42087026 }, - { url = "https://files.pythonhosted.org/packages/16/33/2a67c0f783251106aeeee516f4806161e7b481f7d744d0d643d2f30230a5/pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960", size = 25250108 }, - { url = "https://files.pythonhosted.org/packages/2b/8d/275c58d4b00781bd36579501a259eacc5c6dfb369be4ddeb672ceb551d2d/pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c", size = 30653552 }, - { url = "https://files.pythonhosted.org/packages/a0/9e/e6aca5cc4ef0c7aec5f8db93feb0bde08dbad8c56b9014216205d271101b/pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae", size = 32103413 }, - { url = "https://files.pythonhosted.org/packages/6a/fa/a7033f66e5d4f1308c7eb0dfcd2ccd70f881724eb6fd1776657fdf65458f/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4", size = 41134869 }, - { url = "https://files.pythonhosted.org/packages/2d/92/34d2569be8e7abdc9d145c98dc410db0071ac579b92ebc30da35f500d630/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2", size = 42192626 }, - { url = "https://files.pythonhosted.org/packages/0a/1f/80c617b1084fc833804dc3309aa9d8daacd46f9ec8d736df733f15aebe2c/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6", size = 40496708 }, - { url = "https://files.pythonhosted.org/packages/e6/90/83698fcecf939a611c8d9a78e38e7fed7792dcc4317e29e72cf8135526fb/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136", size = 42075728 }, - { url = "https://files.pythonhosted.org/packages/40/49/2325f5c9e7a1c125c01ba0c509d400b152c972a47958768e4e35e04d13d8/pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef", size = 25242568 }, - { url = "https://files.pythonhosted.org/packages/3f/72/135088d995a759d4d916ec4824cb19e066585b4909ebad4ab196177aa825/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0", size = 30702371 }, - { url = "https://files.pythonhosted.org/packages/2e/01/00beeebd33d6bac701f20816a29d2018eba463616bbc07397fdf99ac4ce3/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9", size = 32116046 }, - { url = "https://files.pythonhosted.org/packages/1f/c9/23b1ea718dfe967cbd986d16cf2a31fe59d015874258baae16d7ea0ccabc/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3", size = 41091183 }, - { url = "https://files.pythonhosted.org/packages/3a/d4/b4a3aa781a2c715520aa8ab4fe2e7fa49d33a1d4e71c8fc6ab7b5de7a3f8/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6", size = 42171896 }, - { url = "https://files.pythonhosted.org/packages/23/1b/716d4cd5a3cbc387c6e6745d2704c4b46654ba2668260d25c402626c5ddb/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a", size = 40464851 }, - { url = "https://files.pythonhosted.org/packages/ed/bd/54907846383dcc7ee28772d7e646f6c34276a17da740002a5cefe90f04f7/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8", size = 42085744 }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, -] - -[[package]] -name = "pytz" -version = "2025.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5f/57/df1c9157c8d5a05117e455d66fd7cf6dbc46974f832b1058ed4856785d8a/pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e", size = 319617 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/38/ac33370d784287baa1c3d538978b5e2ea064d4c1b93ffbd12826c190dd10/pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57", size = 507930 }, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, - { url = 
"https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size 
= 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, -] - -[[package]] -name = "requests" -version = "2.32.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, -] - -[[package]] -name = "tqdm" -version = "4.67.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, -] - -[[package]] -name = "tzdata" -version = "2025.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/0f/fa4723f22942480be4ca9527bbde8d43f6c3f2fe8412f00e7f5f6746bc8b/tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694", size = 
194950 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639", size = 346762 }, -] - -[[package]] -name = "urllib3" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, -] - -[[package]] -name = "xxhash" -version = "3.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/0e/1bfce2502c57d7e2e787600b31c83535af83746885aa1a5f153d8c8059d6/xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00", size = 31969 }, - { url = "https://files.pythonhosted.org/packages/3f/d6/8ca450d6fe5b71ce521b4e5db69622383d039e2b253e9b2f24f93265b52c/xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9", size = 30787 }, - { url = "https://files.pythonhosted.org/packages/5b/84/de7c89bc6ef63d750159086a6ada6416cc4349eab23f76ab870407178b93/xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84", size = 220959 }, - { url = "https://files.pythonhosted.org/packages/fe/86/51258d3e8a8545ff26468c977101964c14d56a8a37f5835bc0082426c672/xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793", size = 200006 }, - { url = "https://files.pythonhosted.org/packages/02/0a/96973bd325412feccf23cf3680fd2246aebf4b789122f938d5557c54a6b2/xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be", size = 428326 }, - { url = "https://files.pythonhosted.org/packages/11/a7/81dba5010f7e733de88af9555725146fc133be97ce36533867f4c7e75066/xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6", size = 194380 }, - { url = "https://files.pythonhosted.org/packages/fb/7d/f29006ab398a173f4501c0e4977ba288f1c621d878ec217b4ff516810c04/xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90", size = 207934 }, - { url = "https://files.pythonhosted.org/packages/8a/6e/6e88b8f24612510e73d4d70d9b0c7dff62a2e78451b9f0d042a5462c8d03/xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27", size = 216301 }, - { url = 
"https://files.pythonhosted.org/packages/af/51/7862f4fa4b75a25c3b4163c8a873f070532fe5f2d3f9b3fc869c8337a398/xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2", size = 203351 }, - { url = "https://files.pythonhosted.org/packages/22/61/8d6a40f288f791cf79ed5bb113159abf0c81d6efb86e734334f698eb4c59/xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d", size = 210294 }, - { url = "https://files.pythonhosted.org/packages/17/02/215c4698955762d45a8158117190261b2dbefe9ae7e5b906768c09d8bc74/xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab", size = 414674 }, - { url = "https://files.pythonhosted.org/packages/31/5c/b7a8db8a3237cff3d535261325d95de509f6a8ae439a5a7a4ffcff478189/xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e", size = 192022 }, - { url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8", size = 30170 }, - { url = "https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e", size = 30040 }, - { url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2", size = 26796 }, - { url = "https://files.pythonhosted.org/packages/c9/b8/e4b3ad92d249be5c83fa72916c9091b0965cb0faeff05d9a0a3870ae6bff/xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6", size = 31795 }, - { url = "https://files.pythonhosted.org/packages/fc/d8/b3627a0aebfbfa4c12a41e22af3742cf08c8ea84f5cc3367b5de2d039cce/xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5", size = 30792 }, - { url = "https://files.pythonhosted.org/packages/c3/cc/762312960691da989c7cd0545cb120ba2a4148741c6ba458aa723c00a3f8/xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc", size = 220950 }, - { url = "https://files.pythonhosted.org/packages/fe/e9/cc266f1042c3c13750e86a535496b58beb12bf8c50a915c336136f6168dc/xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3", size = 199980 }, - { url = "https://files.pythonhosted.org/packages/bf/85/a836cd0dc5cc20376de26b346858d0ac9656f8f730998ca4324921a010b9/xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c", size = 428324 }, - { url = "https://files.pythonhosted.org/packages/b4/0e/15c243775342ce840b9ba34aceace06a1148fa1630cd8ca269e3223987f5/xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb", size = 194370 }, - { url = 
"https://files.pythonhosted.org/packages/87/a1/b028bb02636dfdc190da01951d0703b3d904301ed0ef6094d948983bef0e/xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f", size = 207911 }, - { url = "https://files.pythonhosted.org/packages/80/d5/73c73b03fc0ac73dacf069fdf6036c9abad82de0a47549e9912c955ab449/xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7", size = 216352 }, - { url = "https://files.pythonhosted.org/packages/b6/2a/5043dba5ddbe35b4fe6ea0a111280ad9c3d4ba477dd0f2d1fe1129bda9d0/xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326", size = 203410 }, - { url = "https://files.pythonhosted.org/packages/a2/b2/9a8ded888b7b190aed75b484eb5c853ddd48aa2896e7b59bbfbce442f0a1/xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf", size = 210322 }, - { url = "https://files.pythonhosted.org/packages/98/62/440083fafbc917bf3e4b67c2ade621920dd905517e85631c10aac955c1d2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7", size = 414725 }, - { url = "https://files.pythonhosted.org/packages/75/db/009206f7076ad60a517e016bb0058381d96a007ce3f79fa91d3010f49cc2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c", size = 192070 }, - { url = "https://files.pythonhosted.org/packages/1f/6d/c61e0668943a034abc3a569cdc5aeae37d686d9da7e39cf2ed621d533e36/xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637", size = 30172 }, - { url = "https://files.pythonhosted.org/packages/96/14/8416dce965f35e3d24722cdf79361ae154fa23e2ab730e5323aa98d7919e/xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43", size = 30041 }, - { url = "https://files.pythonhosted.org/packages/27/ee/518b72faa2073f5aa8e3262408d284892cb79cf2754ba0c3a5870645ef73/xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b", size = 26801 }, -] - -[[package]] -name = "yarl" -version = "1.18.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "multidict" }, - { name = "propcache" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644 }, - { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962 }, - { url = 
"https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795 }, - { url = "https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368 }, - { url = "https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314 }, - { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987 }, - { url = "https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914 }, - { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765 }, - { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444 }, - { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760 }, - { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484 }, - { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864 }, - { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537 }, - { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861 }, - { url = "https://files.pythonhosted.org/packages/7d/e3/924c3f64b6b3077889df9a1ece1ed8947e7b61b0a933f2ec93041990a677/yarl-1.18.3-cp312-cp312-win32.whl", hash = 
"sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", size = 84097 }, - { url = "https://files.pythonhosted.org/packages/34/45/0e055320daaabfc169b21ff6174567b2c910c45617b0d79c68d7ab349b02/yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", size = 90399 }, - { url = "https://files.pythonhosted.org/packages/30/c7/c790513d5328a8390be8f47be5d52e141f78b66c6c48f48d241ca6bd5265/yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb", size = 140789 }, - { url = "https://files.pythonhosted.org/packages/30/aa/a2f84e93554a578463e2edaaf2300faa61c8701f0898725842c704ba5444/yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa", size = 94144 }, - { url = "https://files.pythonhosted.org/packages/c6/fc/d68d8f83714b221a85ce7866832cba36d7c04a68fa6a960b908c2c84f325/yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782", size = 91974 }, - { url = "https://files.pythonhosted.org/packages/56/4e/d2563d8323a7e9a414b5b25341b3942af5902a2263d36d20fb17c40411e2/yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0", size = 333587 }, - { url = "https://files.pythonhosted.org/packages/25/c9/cfec0bc0cac8d054be223e9f2c7909d3e8442a856af9dbce7e3442a8ec8d/yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482", size = 344386 }, - { url = "https://files.pythonhosted.org/packages/ab/5d/4c532190113b25f1364d25f4c319322e86232d69175b91f27e3ebc2caf9a/yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186", size = 345421 }, - { url = "https://files.pythonhosted.org/packages/23/d1/6cdd1632da013aa6ba18cee4d750d953104a5e7aac44e249d9410a972bf5/yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58", size = 339384 }, - { url = "https://files.pythonhosted.org/packages/9a/c4/6b3c39bec352e441bd30f432cda6ba51681ab19bb8abe023f0d19777aad1/yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53", size = 326689 }, - { url = "https://files.pythonhosted.org/packages/23/30/07fb088f2eefdc0aa4fc1af4e3ca4eb1a3aadd1ce7d866d74c0f124e6a85/yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2", size = 345453 }, - { url = "https://files.pythonhosted.org/packages/63/09/d54befb48f9cd8eec43797f624ec37783a0266855f4930a91e3d5c7717f8/yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8", size = 341872 }, - { url = "https://files.pythonhosted.org/packages/91/26/fd0ef9bf29dd906a84b59f0cd1281e65b0c3e08c6aa94b57f7d11f593518/yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1", size = 347497 }, - { url = 
"https://files.pythonhosted.org/packages/d9/b5/14ac7a256d0511b2ac168d50d4b7d744aea1c1aa20c79f620d1059aab8b2/yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a", size = 359981 }, - { url = "https://files.pythonhosted.org/packages/ca/b3/d493221ad5cbd18bc07e642894030437e405e1413c4236dd5db6e46bcec9/yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10", size = 366229 }, - { url = "https://files.pythonhosted.org/packages/04/56/6a3e2a5d9152c56c346df9b8fb8edd2c8888b1e03f96324d457e5cf06d34/yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8", size = 360383 }, - { url = "https://files.pythonhosted.org/packages/fd/b7/4b3c7c7913a278d445cc6284e59b2e62fa25e72758f888b7a7a39eb8423f/yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d", size = 310152 }, - { url = "https://files.pythonhosted.org/packages/f5/d5/688db678e987c3e0fb17867970700b92603cadf36c56e5fb08f23e822a0c/yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c", size = 315723 }, - { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109 }, -] diff --git a/data/lexdk/images/dist_document_length.png b/data/lexdk/images/dist_document_length.png deleted file mode 100644 index 3455955e1db58a7707b0aed5849445e9f19fb6d0..0000000000000000000000000000000000000000 --- a/data/lexdk/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9aead97c97d52f9b4b9fced8eea7827d764a6a91f2af23ddc4e90607d23c0076 -size 551893 diff --git a/data/lexdk/lexdk.md b/data/lexdk/lexdk.md deleted file mode 100644 index ee1710c11415387f3c06504a5a5731396f5f11b8..0000000000000000000000000000000000000000 --- a/data/lexdk/lexdk.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -pretty_name: Lex.dk Open -language: - - da -license: other -license_name: disputed CC-BY-SA 4.0 -task_categories: - - text-generation - - fill-mask -task_ids: - - language-modeling -source_datasets: - - alexandrainst/lexdk-open -domains: - - Encyclopedic ---- - -# Dataset Card for Lex.dk Open - - -Articles from [lex.dk](https://lex.dk). - - -**NOTE:** This dataset been taken down due to concerns about licensing. See [license information](#license-information). - -Lex.dk is a Danish online encyclopedia platform providing access to reliable and authoritative knowledge on a wide range of topics. It is created and curated by experts, ensuring high-quality, accurate content. The platform serves as a central hub for general and specialized information in Danish, making it a valuable resource for education, research, and general learning. - - - - -## Dataset Description - - -- **Language**: dan, dansk, Danish -- **Number of samples**: 11.89K -- **Number of tokens (Llama 3)**: 5.69M -- **Average document length (characters)**: 1405.64 - - - -## Dataset Structure -An example from the dataset looks as follows. 
- - -```py -{ - "text": "Oluf Høst Museet\n\npubliceret: 2014-04-23 03:42:33+02:00\nOluf Høst Museet, kunstmuseum i Gudhjem, Bor[...]", - "source": "lexdk", - "id": "https://denstoredanske.lex.dk/Oluf_H%C3%B8st_Museet", - "added": "2025-01-04", - "created": "2014-04-23, 2014-04-23", - "license": "cc-by-sa-4.0", - "metadata": { - "source-pretty": "Lex.dk" - } -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `id` (`str`): A unique identifier for each document. -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `license` (`str`): The license of the document. The licenses vary according to the source. -- `domain` (`str`): The domain of the source. -- `metadata/source-pretty` (`str`): The long-form version of the short-form source name. -- `metadata/*`: Potentially additional metadata - - - -### Dataset Statistics - - - - - - - - -## License Information - -The articles here are derived from lex.dk and only include articles with a CC-BY-SA 4.0 license as defined in the articles. -See [this article](https://lex.dk/Oluf_Høst_Museet) for an example. However, we recently discovered the following subsection in their [licensing](https://lex.dk/.licenses/free): - -> The license applies to the individual article texts. You can find license information at the bottom of every article on lex.dk. The license does not apply to whole works, such as Den Store Danske Encyklopædi or Trap Danmark. **You may therefore not download and reuse all articles with a free license, as this would constitute a reproduction of a large part of a work.** - -We are currently examining to what extent it is possible to share this work and have, until further clarification, chosen to take it down. - - -## Additional Information - -### Citation Information - -This dataset is derived from the publicly available dataset [alexandrainst/lexdk-open](https://huggingface.co/datasets/alexandrainst/lexdk-open). \ No newline at end of file diff --git a/data/memo/create.py b/data/memo/create.py deleted file mode 100644 index 6045c32c5303447000ac8d85a58bba314c975186..0000000000000000000000000000000000000000 --- a/data/memo/create.py +++ /dev/null @@ -1,184 +0,0 @@ -# /// script -# requires-python = "==3.12" -# dependencies = [ -# "datasets==3.2.0", -# "dynaword" -# ] -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "00e7f2aee7f7ad2da423419f77ecbb9c0536de0d" } -# /// -""" -Script for downloading and processing the Danish Memo repository. 
- -Note: To run this script, you need to set `GIT_LFS_SKIP_SMUDGE=1` to be able to install dynaword: - -```bash -GIT_LFS_SKIP_SMUDGE=1 uv run data/memo/create.py -``` - -This second version fixed previous issues with the download and processing of the Danish Memo repository: -https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/67 -""" - -import logging -import subprocess -from datetime import datetime -from pathlib import Path -from typing import Any - -import pandas as pd -from datasets import Dataset - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) - -download_path = Path(__file__).parent / "tmp" - - -def download_repo( - download_path: Path = download_path, - repo_url: str = "https://huggingface.co/datasets/MiMe-MeMo/Corpus-v1.1", - revision="7205897f1f3ee65e296072f3e96d49488e54e8ce", -) -> Path: - """ - Downloads the repository from the given URL to the specified path. - """ - logger.info(f"Downloading repository to {download_path}") - if not download_path.exists(): - download_path.mkdir(parents=True, exist_ok=True) - - repo_path = download_path / repo_url.split("/")[-1] - if repo_path.exists(): - logger.info(f"Repository already exists at {repo_path}, skipping download.") - return repo_path - - # Use git to clone the repository running it from the download path - subprocess.run(["git", "clone", repo_url], check=True, cwd=download_path) - # Checkout the specific revision - subprocess.run(["git", "checkout", revision], check=True, cwd=repo_path) - logger.info("Download complete.") - return repo_path - - -def load_texts(repo_path: Path) -> list[dict[str, str]]: - """ - Loads texts from the downloaded repository. 
- """ - text_files_path = repo_path / "texts" - text_files = list(text_files_path.glob("*.txt")) - texts = [] - for file in text_files: - name = file.stem - with file.open("r") as f: - content = f.read() - texts.append({"name": name, "text": content}) - logger.info(f"Loaded {len(texts)} texts from the repository.") - return texts - - -def load_memo(repo_path: Path) -> pd.DataFrame: - texts = load_texts(repo_path) - - metadata_csv = repo_path / "MeMo-corpus-metadata-v1.1-2023-06-20.csv" - metadata = pd.read_csv(metadata_csv) - # remove .pdf from "filename" - metadata["filename"] = metadata["filename"].str.replace(".pdf", "", regex=False) - texts_df = pd.DataFrame(texts) - - text_df_fileames = set(texts_df["name"]) - metadata_filenames = set(metadata["filename"]) - - text_without_metadata = [t for t in text_df_fileames if t not in metadata_filenames] - - assert ( - len(text_without_metadata) == 0 - ), f"Some texts in the repository do not have metadata: {text_without_metadata}" - - # merge texts with metadata - merged_df = pd.merge( - texts_df, metadata, left_on="name", right_on="filename", how="inner" - ) - - logger.info(f"Loaded {len(merged_df)} rows from the MeMo dataset.") - return merged_df - - -def convert_to_dynaword_format(memo_df: pd.DataFrame) -> Dataset: - # convert to dynaword samples - samples: list[dict[str, Any]] = [] - for _, row in memo_df.iterrows(): - text = row["text"] - assert isinstance(text, str), f"Text is not a string: {text}" - - # if there is a title then add it to the text - title = row["title"] if pd.notna(row["title"]) else "Ukendt titel" - subtitle = row["subtitle"] if pd.notna(row["subtitle"]) else "" - title = f"{title} {subtitle}".strip() - - first_name = row["firstname"] - last_name = row["surname"] - pseudonym = row["pseudonym"] - - full_name = f"{first_name} {last_name}".strip() - if not full_name: - full_name = pseudonym if pd.notna(pseudonym) else "Ukendt forfatter" - else: - # add pseudonym if it exists - if pd.notna(pseudonym) and pseudonym != full_name: - full_name += f" (Pseudonym: {pseudonym})" - - # create a new text with the title and author - text_new = f"{title}\n\nSkrevet af {full_name}\nPubliceret {row['year']} af {row['publisher']}\n ------- \n\n{text}" - - today = datetime.now().date() - sample = { - "id": row["filename"], - "text": text_new, - "source": "memo", - "added": today.isoformat(), - "created": f"{row['year']}-01-01, {row['year']}-12-31", - } - - samples.append(sample) - - ds = Dataset.from_list(samples) - logger.info(f"Converted to dynaword format with {len(ds)} samples.") - return ds - - ds = convert_to_dynaword_format(memo_df) - - -def main(): - save_path = Path(__file__).parent / "memo.parquet" - - repo_path = download_repo(download_path) - memo_df = load_memo(repo_path) - ds = convert_to_dynaword_format(memo_df) - - # quality checks and processing - ds = remove_empty_texts(ds) - ds = remove_duplicate_text(ds) - ds = add_token_count(ds) - ds = ensure_column_order(ds) - - # save to parquet - ds.to_parquet(save_path) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / "memo.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/memo/descriptive_stats.json b/data/memo/descriptive_stats.json deleted file mode 100644 index 6eab8a43672f07c4e7043ee1752ad6f80393a1f1..0000000000000000000000000000000000000000 --- a/data/memo/descriptive_stats.json +++ /dev/null @@ 
-1,9 +0,0 @@ -{ - "number_of_samples": 858, - "number_of_tokens": 113742425, - "min_length_tokens": 6671, - "max_length_tokens": 720171, - "number_of_characters": 322392717, - "min_length_characters": 18626, - "max_length_characters": 2074215 -} \ No newline at end of file diff --git a/data/memo/images/dist_document_length.png b/data/memo/images/dist_document_length.png deleted file mode 100644 index 72f5e54f8414668203c7009cdf8c47be93fe382d..0000000000000000000000000000000000000000 --- a/data/memo/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3ddef7f93590da187c143a5a1e45fbb29eb2d16cf37f3372c823a1cb282c5f73 -size 541362 diff --git a/data/memo/memo.log b/data/memo/memo.log deleted file mode 100644 index d3fa10d498740b7638852a5fda9559df31cfafb7..0000000000000000000000000000000000000000 --- a/data/memo/memo.log +++ /dev/null @@ -1,20 +0,0 @@ -2025-06-23 15:14:07,867 - INFO - Downloading repository to /Users/au561649/Github/danish-dynaword/data/memo/tmp -2025-06-23 15:14:07,867 - INFO - Repository already exists at /Users/au561649/Github/danish-dynaword/data/memo/tmp/Corpus-v1.1, skipping download. -2025-06-23 15:14:19,489 - INFO - Loaded 858 texts from the repository. -2025-06-23 15:14:19,512 - INFO - Loaded 858 rows from the MeMo dataset. -2025-06-23 15:14:20,848 - INFO - Converted to dynaword format with 858 samples. -2025-06-23 15:14:20,903 - INFO - Removing empty texts -2025-06-23 15:14:25,977 - INFO - Filtered 0 empty examples -2025-06-23 15:14:25,977 - INFO - Removing duplicate texts -2025-06-23 15:14:26,434 - INFO - Filtered 0 duplicate examples -2025-06-23 15:15:40,637 - INFO - Ensuring columns are in the correct order and are present -2025-06-23 15:33:08,880 - INFO - Downloading repository to /Users/au561649/Github/danish-dynaword/data/memo/tmp -2025-06-23 15:33:08,880 - INFO - Repository already exists at /Users/au561649/Github/danish-dynaword/data/memo/tmp/Corpus-v1.1, skipping download. -2025-06-23 15:33:19,998 - INFO - Loaded 858 texts from the repository. -2025-06-23 15:33:20,025 - INFO - Loaded 858 rows from the MeMo dataset. -2025-06-23 15:33:21,332 - INFO - Converted to dynaword format with 858 samples. -2025-06-23 15:33:21,373 - INFO - Removing empty texts -2025-06-23 15:33:25,745 - INFO - Filtered 0 empty examples -2025-06-23 15:33:25,746 - INFO - Removing duplicate texts -2025-06-23 15:33:26,174 - INFO - Filtered 0 duplicate examples -2025-06-23 15:34:37,788 - INFO - Ensuring columns are in the correct order and are present diff --git a/data/memo/memo.md b/data/memo/memo.md deleted file mode 100644 index cb6da0f423eadd64cf2a368b27e5fddbbc110b49..0000000000000000000000000000000000000000 --- a/data/memo/memo.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -pretty_name: MeMo Canonical Novels -language: -- da -license: cc-by-sa-4.0 -license_name: CC-BY-SA 4.0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -source_datasets: -- MiMe-MeMo/Corpus-v1.1 -domains: -- Books ---- - -# Dataset Card for MeMo Canonical Novels - - -The MeMo corpus comprises almost all Danish novels from the period 1870-1899, known as the Modern Breakthrough. - - -The MeMo corpus was established to investigate literary and cultural change in a seminal epoch of Scandinavian cultural and social history (known as 'the modern breakthrough') using natural language processing and other computational methods. 
The corpus consists of original novels by Norwegian and Danish authors printed in Denmark in the period 1870-99. It includes 858 volumes, totaling 4.5 million sentences and 65 million words. - -Additional information about this dataset can be found on their [project page](https://nors.ku.dk/english/research/projects/measuring-modernity/) or on their huggingface [dataset](https://huggingface.co/datasets/MiMe-MeMo/Corpus-v1.1). The dataset can be inspected online using [the Korp platform](https://alf.hum.ku.dk/korp/?mode=memo_all#?cqp=%5B%5D&corpus=memo_all). - -## Dataset Description - - -- **Number of samples**: 858 -- **Number of tokens (Llama 3)**: 113.74M -- **Average document length in tokens (min, max)**: 132.57K (6.67K, 720.17K) - - - -## Dataset Structure -An example from the dataset looks as follows. - - -```py -{ - "id": "1887_Paulsen_EnFremtidskvinde", - "text": "En fremtidskvinde?\n\nSkrevet af John Paulsen\nPubliceret 1887 af Schubothe\n ------- \n\nDen skandinavisk[...]", - "source": "memo", - "added": "2025-06-23", - "created": "1887-01-01, 1887-12-31", - "token_count": 98454 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - - -### Dataset Statistics - -
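The statistics above were stored in `data/memo/descriptive_stats.json` (also removed in this diff). As a rough illustration, here is a minimal sketch, not part of the repository, of how those figures could be recomputed from `memo.parquet`, assuming only the documented `text` and `token_count` columns:

```py
# Hedged sketch: recompute the descriptive statistics for the MeMo subset.
# Assumes data/memo/memo.parquet with the documented `text` and
# `token_count` columns; this snippet is not part of the repository.
import pandas as pd

df = pd.read_parquet("data/memo/memo.parquet")
char_counts = df["text"].str.len()  # character length per document

stats = {
    "number_of_samples": len(df),
    "number_of_tokens": int(df["token_count"].sum()),
    "min_length_tokens": int(df["token_count"].min()),
    "max_length_tokens": int(df["token_count"].max()),
    "number_of_characters": int(char_counts.sum()),
    "min_length_characters": int(char_counts.min()),
    "max_length_characters": int(char_counts.max()),
}
print(stats)  # should line up with data/memo/descriptive_stats.json
```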

- -

- - -### Processing - -In addition to the text itself, we prefix each document with the title, year, author name, pseudonym, and publisher. This allows the model to learn the relation between the document and the relevant metadata. - - -### Updates and Corrections - -This version fixes previous [issues](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions/67) in MeMo where the documents were incorrectly truncated and normalized. Removing this truncation led to a >10x increase in the number of tokens. - - -## Additional Information - - -### Contact -For questions related to the processing and conversion, feel free to open a [discussion thread](https://huggingface.co/datasets/danish-foundation-models/danish-dynaword/discussions). For questions related to the initial collection of the data, please contact the project PI, Jens Bjerring-Hansen, associate professor at Copenhagen University. - -### Citation Information - -This dataset is derived from the publicly available dataset [MiMe-MeMo/Corpus-v1.1](https://huggingface.co/datasets/MiMe-MeMo/Corpus-v1.1) and was released as part of the paper Bjerring-Hansen, Jens, et al. "Mending Fractured Texts. A heuristic procedure for correcting OCR data." (2022). https://ceur-ws.org/Vol-3232/paper14.pdf, which has the following citation: - -``` -@inproceedings{bjerring2022mending, - title={Mending Fractured Texts. A heuristic procedure for correcting OCR data}, - author={Bjerring-Hansen, Jens and Kristensen-McLachlan, Ross Deans and Diderichsen, Philip and Hansen, Dorte Haltrup}, - booktitle={CEUR Workshop Proceedings}, - volume={3232}, - pages={177--186}, - year={2022}, - organization={ceur workshop proceedings} -} -``` - -### Other uses of this dataset - -This dataset has additionally been used in the following study: -``` -@inproceedings{feldkamp_canonical_2024, - address = {Miami, Florida, USA}, - title = {Canonical {Status} and {Literary} {Influence}: {A} {Comparative} {Study} of {Danish} {Novels} from the {Modern} {Breakthrough} (1870--1900)}, - booktitle = {Proceedings of the {Joint} 4th {International} {Conference} on {Natural} {Language} {Processing} for {Digital} {Humanities}}, - publisher = {Association for Computational Linguistics, Forthcoming}, - author = {Feldkamp, Pascale and Lassche, Alie and Kostkan, Jan and Kardos, Márton and Baunvig, Katrine F. 
and Nielbo, Kristoffer L.}, - year = {2024}, -} -``` diff --git a/data/memo/memo.parquet b/data/memo/memo.parquet deleted file mode 100644 index 5b019a45c1bd7ee4d46a71a17666702561e706c6..0000000000000000000000000000000000000000 --- a/data/memo/memo.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:44002e00b3e876bb6ebd70949723a08310bb022e4e91502c5ec7a64efb6d4706 -size 202092223 diff --git a/data/miljoeportalen/create.py b/data/miljoeportalen/create.py deleted file mode 100644 index 98db9e1ef3fed160fdc4bb084ee1634caacbc1a8..0000000000000000000000000000000000000000 --- a/data/miljoeportalen/create.py +++ /dev/null @@ -1,50 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0", -# ] -# /// - -from pathlib import Path -from typing import cast - -from datasets import Dataset, load_dataset - -source = "miljoeportalen" - - -def convert_sample(example): - new_example = dict( - text_new=example["text"], - source=source, - domain="Web", - license="cc0", - added="2025-03-24", - created="2024-01-01, 2025-01-01", # Scrape happened within the year; data likely written earlier - metadata={"source-pretty": "Miljøportalen"}, - ) - - return new_example - - -def main(): - data_path = Path( - "/work/dfm-data/pre-training/miljoeportal/documents/miljoeportal.jsonl.gz" - ) - ds = load_dataset("json", data_files=data_path.as_posix(), split="train") - - ds = cast(Dataset, ds) - - ds = ds.map(convert_sample, remove_columns=ds.column_names) - ds = ds.rename_columns({"text_new": "text"}) - ds = ds.add_column("id", [f"{source}_{i}" for i in range(len(ds))]) # type: ignore - ds = ds.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - save_path = Path(__file__).parent / f"{source}.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/miljoeportalen/descriptive_stats.json b/data/miljoeportalen/descriptive_stats.json deleted file mode 100644 index 6cac0f082dddff3564591d1f4772d9dbe869eb40..0000000000000000000000000000000000000000 --- a/data/miljoeportalen/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 2120, - "number_of_tokens": 127379475, - "min_length_tokens": 54, - "max_length_tokens": 1436111, - "number_of_characters": 483758823, - "min_length_characters": 148, - "max_length_characters": 18428593 -} \ No newline at end of file diff --git a/data/miljoeportalen/images/dist_document_length.png b/data/miljoeportalen/images/dist_document_length.png deleted file mode 100644 index 6285bf8bb9c982769924df600a53aea0b7907715..0000000000000000000000000000000000000000 --- a/data/miljoeportalen/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:26d1d669944133c88014711e732c8d87b0e81ec45ab24b5e5c72f3754a69c831 -size 553949 diff --git a/data/miljoeportalen/miljoeportalen.md b/data/miljoeportalen/miljoeportalen.md deleted file mode 100644 index dcc5bcb86adc6468d29e374a65b2d4254f4de446..0000000000000000000000000000000000000000 --- a/data/miljoeportalen/miljoeportalen.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -pretty_name: "Milj\xF8portalen" -language: -- da -license: cc0-1.0 -license_name: CC-0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Web ---- - -# Dataset Card for Miljøportalen - - -Data from [Danmarks Miljøportalen](https://www.miljoeportal.dk/om-danmarks-miljoeportal/) (Denmark's Environment Portal) - - -Denmark's 
Environment Portal (Danmarks Miljøportal) is a joint public partnership owned by the state, municipalities, and regions, which aims to support digital environmental management in Denmark. - -Danmarks Miljøportal's goal is for environmental data to be included early in all decisions that have an environmental impact. They do this by creating easy and open access to environmental data, making it possible for authorities and businesses to integrate the environment into their decisions. - -These can be decisions specifically targeted at the environment, such as water plans, the Green Tripartite Agreement, biodiversity, and nature restoration, but also decisions about, for example, renewable energy, climate adaptation, new roads, residential areas, and industrial enterprises, where environmental aspects need to be considered. - - -## Dataset Description - - -- **Number of samples**: 2.12K -- **Number of tokens (Llama 3)**: 127.38M -- **Average document length in tokens (min, max)**: 60.08K (54, 1.44M) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "miljoeportalen_0", - "text": "Bila110 g 1 101 10 - miljTIL R lj TIL RøraÆTSHUSKO pp ÆTSHUS KOÆTSHUS Kort\n\nLOKALPLAN NR[...]", - "source": "miljoeportalen", - "added": "2025-03-24", - "created": "2024-01-01, 2025-01-01", - "token_count": 9054 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - - -### Dataset Statistics - -
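As a rough illustration, the counts above could be re-derived from the combined dataset by filtering on the documented `source` column. A hedged sketch, assuming the Hub id `danish-foundation-models/danish-dynaword` used elsewhere in this diff and a single `train` split; this snippet is not part of the repository:

```py
# Hedged sketch: isolate the miljoeportalen documents in the combined
# dataset by filtering on the documented `source` column.
from datasets import load_dataset

ds = load_dataset("danish-foundation-models/danish-dynaword", split="train")
miljoe = ds.filter(lambda row: row["source"] == "miljoeportalen")

print(len(miljoe))                 # expected: 2120 samples
print(sum(miljoe["token_count"]))  # expected: roughly 127.38M tokens
```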

- -

- - - -## Additional Information - - - - -### License Information -This dataset is licensed under CC0. The license was clarified by support@miljoeportal.dk: - -> The data is subject to Creative Commons CC0, see: -> https://creativecommons.org/publicdomain/zero/1.0/deed.da. -> -> Let me know if you have any further questions. -> If you have questions about your case or further comments, please reply to this email. - - - -### Citation Information - -No citation is applicable for this work. diff --git a/data/miljoeportalen/miljoeportalen.parquet b/data/miljoeportalen/miljoeportalen.parquet deleted file mode 100644 index 79ed850356d57f56fbae9458c432adbd2583e660..0000000000000000000000000000000000000000 --- a/data/miljoeportalen/miljoeportalen.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3b3d05d07dbb9c57e1ac45c64c42fe08d814d76d6ffb269b414370670d6fdb15 -size 169046087 diff --git a/data/naat/descriptive_stats.json b/data/naat/descriptive_stats.json deleted file mode 100644 index 3f9964a2dcedaf8e89e1143517074b9b3faf99de..0000000000000000000000000000000000000000 --- a/data/naat/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 129, - "number_of_tokens": 286677, - "min_length_tokens": 228, - "max_length_tokens": 3952, - "number_of_characters": 881378, - "min_length_characters": 664, - "max_length_characters": 12118 -} \ No newline at end of file diff --git a/data/naat/images/dist_document_length.png b/data/naat/images/dist_document_length.png deleted file mode 100644 index 5178bd647e82bd1292fc15a5a73c7e3b179a5ac6..0000000000000000000000000000000000000000 --- a/data/naat/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:72c633f6bf39232bd5c6cac32fea89b14c8a523033f51dc908f9eccc963cbb79 -size 529552 diff --git a/data/naat/naat.md b/data/naat/naat.md index 10903e3d124826f1fce8123dfa81f7e02ffe4b6d..2fa0b7bf892cc71d2322fb410d488f9705126797 100644 --- a/data/naat/naat.md +++ b/data/naat/naat.md @@ -1,93 +1,55 @@ --- pretty_name: NAAT language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Conversation + - language-modeling --- - # Dataset Card for NAAT - - -Danish speeches from 1930-2022. - - - - ## Dataset Description - - - -- **Number of samples**: 129 -- **Number of tokens (Llama 3)**: 286.68K -- **Average document length in tokens (min, max)**: 2.22K (228, 3.95K) - - - - -## Dataset Structure +- **Number of records:** 129 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. 
- - - -```py +```yaml { - "id": "naat_1958kongfrederikix", - "text": "Naar jeg i aften sender min nytaarshilsen til det danske folk og tænker tilbage paa det aar, der sva[...]", - "source": "naat", - "added": "2020-02-11", - "created": "1930-01-01, 2022-01-01", - "token_count": 1059 + 'text': 'Naar jeg i aften sender min nytaarshilsen til det ', + 'source': 'naat', + 'id': 'naat_1958kongfrederikix', + 'added': '2020-02-11', + 'created': '1930-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Conversation', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'NAAT' + } } ``` -### Data Fields +## Data Fields -An entry in the dataset consists of the following fields: +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp ai2 acquired this data. +- **created**: timestamp when original document was created (best-guess if not available) +- **metadata**: source-specific metadata. -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +## License Information +
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code -### Dataset Statistics - - -

- +CC0 1.0 Universal

- - - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` +
diff --git a/data/naat/naat.parquet b/data/naat/naat.parquet index 831e17fe15fe0f319da00ea2db0a50443a95e21a..677610717c76f2bec2aca9867a39667e01eaff7d 100644 --- a/data/naat/naat.parquet +++ b/data/naat/naat.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:742d6d8862c0fd7d14748771815d93d324ab97549e4523daf179c24b0ddc3ac2 -size 544055 +oid sha256:6958784a0c4039e9357dee0dedc6bd010e7dd3573d2d9a4db45ce5e4a6608feb +size 545253 diff --git a/data/ncc_books/create.py b/data/ncc_books/create.py deleted file mode 100644 index c6577365343e8011dfc3af731f4b4991a812c996..0000000000000000000000000000000000000000 --- a/data/ncc_books/create.py +++ /dev/null @@ -1,331 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0" -# ] -# /// - -import inspect -import logging -import re -from collections import defaultdict -from collections.abc import Callable -from datetime import datetime -from pathlib import Path - -import pandas as pd -from datasets import Dataset, load_dataset - -logger = logging.getLogger(__name__) -########## edit manually for each source -hf_path = "NbAiLab/NCC" -source = "ncc_books" -doc_type_searchword = "book" -license = "cc0-1.0" -domain = "Wiki & Books" -num_proc = 10 -########## -today = datetime.now().strftime("%Y-%m-%d") - -# stop words taken from spaCy -# https://github.com/explosion/spaCy/blob/master/spacy/lang/da/stop_words.py -# Source: Handpicked by Jens Dahl Møllerhøj. -spacy_sw = set( - """ -af aldrig alene alle allerede alligevel alt altid anden andet andre at - -bag begge blandt blev blive bliver burde bør - -da de dem den denne dens der derefter deres derfor derfra deri dermed derpå derved det dette dig din dine disse dog du - -efter egen eller ellers en end endnu ene eneste enhver ens enten er et - -flere flest fleste for foran fordi forrige fra få før først - -gennem gjorde gjort god gør gøre gørende - -ham han hans har havde have hel heller hen hende hendes henover her herefter heri hermed herpå hun hvad hvem hver hvilke hvilken hvilkes hvis hvor hvordan hvorefter hvorfor hvorfra hvorhen hvori hvorimod hvornår hvorved - -i igen igennem ikke imellem imens imod ind indtil ingen intet - -jeg jer jeres jo - -kan kom kommer kun kunne - -lad langs lav lave lavet lidt lige ligesom lille længere - -man mange med meget mellem men mens mere mest mig min mindre mindst mine mit må måske - -ned nemlig nogen nogensinde noget nogle nok nu ny nyt nær næste næsten - -og også om omkring op os over overalt - -på - -samme sammen selv selvom senere ses siden sig sige skal skulle som stadig synes syntes så sådan således - -temmelig tidligere til tilbage tit - -ud uden udover under undtagen - -var ved vi via vil ville vore vores vær være været - -øvrigt -""".split() -) - - -def word_tokenize(text: str) -> list[str]: - """ - Tokenizes a string into words, splitting on whitespace and punctuation. 
- - Example: - >>> word_tokenize("Hello, world!") - ['Hello', ',', 'world', '!'] - >>> word_tokenize("This is a test.") - ['This', 'is', 'a', 'test', '.'] - >>> word_tokenize("Many spaces between words.") - ['Many', 'spaces', 'between', 'words', '.'] - """ - - punkt = [",", ".", "!", "?", ":", ";", "(", ")", "[", "]", "{", "}", '"', "'"] - for p in punkt: - text = text.replace(p, f" {p} ") - return text.split() - - -def alpha_ratio(text: str | list[str]) -> float: - """ - If not split already to words, splits text with word_tokenize() - Calculates ratio of words with only alphabetical characters - - """ - if type(text) is str: - text = word_tokenize(text) - else: - pass - - alpha_ratio = 1 - sum(not word.isalpha() for word in text) / len(text) - - return alpha_ratio - - -def count_min_target(given_list: list, target_list: list, min: int) -> bool: - """ - Iterates through given list, until at least min items match any items from target list - - """ - c_item = 0 - given_list_iter = iter(given_list) - while c_item < min: - try: - current_item = next(given_list_iter) - if current_item in target_list: - c_item += 1 - except StopIteration: - break - - return c_item == min - - -def dynaword_format( - meta_document: dict[str, str | int], -) -> dict[str, str | dict[str, str]]: - """Reformats data to fit dynaword standards""" - - text = meta_document.get("text") - id = meta_document.get("id") - date = meta_document.get("publish_year") - doc_type = meta_document.get("doc_type") - - newdata = { - "text": text, - "source": source, - "id": id, - "added": today, - "created": f"{date}-01-01, {date}-12-31", - "license": license, - "domain": domain, - "metadata": { - "source-pretty": f"Norwegian Colossal Corpus ({re.sub('ncc_', '', source)})", - "source-type": doc_type, - }, - } - - return newdata - - -def log_pre_filter_lang_data( - lang_metadata: dict[str, dict[str, int]], filtered_ds: Dataset -): - """ - Function for logging changes in a large dataset, - based on the metadata pre filering and the filtered dataset, - used for language filtering - """ - all_docs = sum(lang_metadata[source].values()) - no_docs = lang_metadata[source].get("no") - da_docs = lang_metadata[source].get("da") - no_perc = round(no_docs / all_docs * 100, 4) - da_perc = round(da_docs / all_docs * 100, 4) - - f_length = len(filtered_ds) - f_perc = round(f_length / da_docs * 100, 4) - f_total_perc = round(f_length / all_docs * 100, 4) - - logger.info(f"Documents of {source}:") - logger.info(f"NO: {no_docs}, {no_perc}% ; DA: {da_docs}, {da_perc}%") - logger.info("After language confidence filtering:") - logger.info(f"DA: {f_length}, lost: {100 - f_perc}%") - logger.info("Total document change:") - logger.info(f"{all_docs} -> {f_length}, loss: {100 - f_total_perc}%") - - -def get_var_name(var): - """outputs the variable name""" - callers_local_vars = inspect.currentframe().f_back.f_back.f_back.f_locals.items() - return [var_name for var_name, var_val in callers_local_vars if var_val is var] - - -def filter_with_changelog( - filter_func: Callable[[Dataset], Dataset], dataset: Dataset -) -> Dataset: - """ - Function, which takes a filter and a dataset. - Counts text docs and tokens before and after filtering, - Saves filtering changes to log. 
- """ - - filter_name = get_var_name(filter_func) - pre_filter_docs = len(dataset) - pre_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - - dataset = dataset.filter(filter_func, num_proc=num_proc) - - post_filter_docs = len(dataset) - post_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - tokens_removed = round((1 - (post_filter_tokens / pre_filter_tokens)) * 100, 2) - docs_removed = round((1 - (post_filter_docs / pre_filter_docs)) * 100, 2) - - logger.info(f"FILTER: {filter_name}") - logger.info( - f"TOKENS: pre: {pre_filter_tokens}, post: {post_filter_tokens}, loss: {tokens_removed}%" - ) - logger.info( - f"DOCUMENTS: pre: {pre_filter_docs}, post: {post_filter_docs}, loss: {docs_removed}%" - ) - - return dataset - - -source_filter = lambda ds: doc_type_searchword in ds["doc_type"] # noqa -length_filter = lambda ds: len(word_tokenize(ds["text"])) >= 10 # noqa -too_long_filter = lambda ds: len(word_tokenize(ds["text"])) > 1e5 # noqa -alpha_filter = lambda ds: alpha_ratio(ds["text"]) >= 0.7 # noqa -stop_word_filter = lambda ds: count_min_target(word_tokenize(ds["text"]), spacy_sw, 2) # noqa - -samples_pr_source: dict = defaultdict(lambda: defaultdict(int)) - - -def language_filter_with_desc_stats(ds: Dataset) -> bool: - """ - Language filtering in a streamed dataset while logging all languages - """ - s = source - language = ds["lang_fasttext"] - samples_pr_source[s][language] += 1 - - language_filter = ( - ds["lang_fasttext"] == "da" and float(ds["lang_fasttext_conf"]) >= 0.75 - ) - - return language_filter - - -def quality_checks(ds: Dataset) -> Dataset: - """ - Quality checks for: - - no duplicate ids - - no duplicate texts - - logs texts > 1e5 tokens - """ - # convert to pandas for the drop_duplicates() - df = pd.DataFrame(ds) - # remove duplicate ids - len_df = len(df) - df = df.drop_duplicates(subset=["id"]) - logger.info(f"Removed {len_df - len(df)} duplicate ids") - # remove rows with duplicate text - len_df = len(df) - df = df.drop_duplicates(subset=["text"]) - logger.info(f"Removed {len_df - len(df)} rows with duplicate text") - # reconvert and remove index - ds_f = Dataset.from_pandas(df, preserve_index=False) - try: - ds_f["__index_level_0__"] - ds_f = ds_f.remove_columns("__index_level_0__") - except KeyError: - pass - - assert len(set(ds_f["id"])) == len(ds_f), "IDs are not unique" - assert len(set(ds_f["text"])) == len(ds_f), "Texts are not unique" - - long_texts = ds_f.filter(too_long_filter, num_proc=None) - if len(long_texts["id"]) > 0: - logger.info(f"{len(long_texts['id'])} Long texts (>~1e5 tokens) found") - for id in long_texts["id"]: - logger.info(f"id: {id}") - else: - logger.info("No long texts (>~1e5 tokens) found") - - return ds_f - - -def main(): - # load all splits - logger.info(f"Loading data from: {hf_path}") - danish_data = load_dataset( - hf_path, streaming=False, split="train+validation", num_proc=num_proc - ) - danish_data.cleanup_cache_files() - - # filter by metadata - logger.info(f"Processing source: {source}") - danish_data = danish_data.filter(source_filter, num_proc=num_proc) - - logger.info("Processing language") - danish_data = danish_data.filter(language_filter_with_desc_stats, num_proc=None) - - # log language changes - log_pre_filter_lang_data(samples_pr_source, danish_data) - - # convert to dynaword format - danish_data = danish_data.map(dynaword_format) - danish_data = danish_data.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - # filter and log 
changes - danish_data = filter_with_changelog(length_filter, danish_data) - danish_data = filter_with_changelog(alpha_filter, danish_data) - danish_data = filter_with_changelog(stop_word_filter, danish_data) - - # Quality checks - danish_data = quality_checks(danish_data) - - ### saving - save_path = Path(__file__).parent / f"{source}.parquet" - danish_data.to_parquet(save_path) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / f"{source}.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/ncc_books/descriptive_stats.json b/data/ncc_books/descriptive_stats.json deleted file mode 100644 index 23feb6d0cb49eb2d57a7017daff418890e66a0ce..0000000000000000000000000000000000000000 --- a/data/ncc_books/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 4902, - "number_of_tokens": 531969285, - "min_length_tokens": 58, - "max_length_tokens": 383508, - "number_of_characters": 1539542960, - "min_length_characters": 153, - "max_length_characters": 1000000 -} \ No newline at end of file diff --git a/data/ncc_books/images/dist_document_length.png b/data/ncc_books/images/dist_document_length.png deleted file mode 100644 index 063eb6f81fb26c77dd9aef2aad5a792b947ea6be..0000000000000000000000000000000000000000 --- a/data/ncc_books/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d2e82bbcf92c59ad1b9595a04cb8d59c7ddc3dc87588cadaf111e00ac88901d9 -size 551879 diff --git a/data/ncc_books/ncc_books.log b/data/ncc_books/ncc_books.log deleted file mode 100644 index 53f53996344c30fd96885ac6855a72849316dd42..0000000000000000000000000000000000000000 --- a/data/ncc_books/ncc_books.log +++ /dev/null @@ -1,1081 +0,0 @@ -2025-05-08 08:54:49,165 - INFO - Loading data from: NbAiLab/NCC -2025-05-08 08:54:58,162 - INFO - Processing source: ncc_books -2025-05-08 08:56:46,434 - INFO - Processing language -2025-05-08 08:57:22,861 - INFO - Documents of ncc_books: -2025-05-08 08:57:22,867 - INFO - NO: 12496, 51.5235% ; DA: 8443, 34.8122% -2025-05-08 08:57:22,867 - INFO - After language confidence filtering: -2025-05-08 08:57:22,867 - INFO - DA: 5125, lost: 39.2988% -2025-05-08 08:57:22,867 - INFO - Total document change: -2025-05-08 08:57:22,867 - INFO - 24253 -> 5125, loss: 78.8686% -2025-05-08 09:01:43,677 - INFO - FILTER: ['length_filter'] -2025-05-08 09:01:43,688 - INFO - TOKENS: pre: 322682794, post: 322682794, loss: 0.0% -2025-05-08 09:01:43,688 - INFO - DOCUMENTS: pre: 5125, post: 5125, loss: 0.0% -2025-05-08 09:05:38,723 - INFO - FILTER: ['alpha_filter'] -2025-05-08 09:05:38,724 - INFO - TOKENS: pre: 322682794, post: 307341040, loss: 4.75% -2025-05-08 09:05:38,726 - INFO - DOCUMENTS: pre: 5125, post: 4902, loss: 4.35% -2025-05-08 09:09:44,812 - INFO - FILTER: ['stop_word_filter'] -2025-05-08 09:09:44,812 - INFO - TOKENS: pre: 307341040, post: 307341040, loss: 0.0% -2025-05-08 09:09:44,812 - INFO - DOCUMENTS: pre: 4902, post: 4902, loss: 0.0% -2025-05-08 09:09:50,536 - INFO - Removed 0 duplicate ids -2025-05-08 09:09:52,320 - INFO - Removed 0 rows with duplicate text -2025-05-08 09:11:29,706 - INFO - 1060 Long texts (>~1e5 tokens) found -2025-05-08 09:11:29,719 - INFO - id: digibok_2009033103031 -2025-05-08 09:11:29,720 - INFO - id: digibok_2009100812001_part0 -2025-05-08 09:11:29,720 - INFO - id: digibok_2006120501011 -2025-05-08 09:11:29,720 - INFO - 
id: digibok_2020022148516 -2025-05-08 09:11:29,720 - INFO - id: digibok_2013121108171 -2025-05-08 09:11:29,720 - INFO - id: digibok_2009022603010 -2025-05-08 09:11:29,720 - INFO - id: digibok_2006081100026 -2025-05-08 09:11:29,720 - INFO - id: digibok_2006111500023 -2025-05-08 09:11:29,720 - INFO - id: digibok_2006112900002_part2 -2025-05-08 09:11:29,720 - INFO - id: digibok_2010062803022_part0 -2025-05-08 09:11:29,720 - INFO - id: digibok_2009030603018 -2025-05-08 09:11:29,720 - INFO - id: digibok_2009021803022 -2025-05-08 09:11:29,720 - INFO - id: digibok_2006081600023 -2025-05-08 09:11:29,727 - INFO - id: digibok_2006120100046 -2025-05-08 09:11:29,727 - INFO - id: digibok_2006113000002_part0 -2025-05-08 09:11:29,727 - INFO - id: digibok_2008010713001_part0 -2025-05-08 09:11:29,727 - INFO - id: digibok_2006081700017 -2025-05-08 09:11:29,727 - INFO - id: digibok_2011051004069 -2025-05-08 09:11:29,729 - INFO - id: digibok_2006082200056 -2025-05-08 09:11:29,729 - INFO - id: digibok_2010072823001_part0 -2025-05-08 09:11:29,729 - INFO - id: digibok_2006120101041_part1 -2025-05-08 09:11:29,729 - INFO - id: digibok_2010021603029 -2025-05-08 09:11:29,729 - INFO - id: digibok_2013040507003 -2025-05-08 09:11:29,729 - INFO - id: digibok_2008052002001 -2025-05-08 09:11:29,729 - INFO - id: digibok_2011052004053 -2025-05-08 09:11:29,729 - INFO - id: digibok_2008033104063_part0 -2025-05-08 09:11:29,729 - INFO - id: digibok_2008031710002 -2025-05-08 09:11:29,729 - INFO - id: digibok_2007042604025 -2025-05-08 09:11:29,729 - INFO - id: digibok_2008042110002_part0 -2025-05-08 09:11:29,729 - INFO - id: digibok_2008040200063 -2025-05-08 09:11:29,732 - INFO - id: digibok_2008040204088_part1 -2025-05-08 09:11:29,732 - INFO - id: digibok_2009021204070 -2025-05-08 09:11:29,732 - INFO - id: digibok_2010060906021 -2025-05-08 09:11:29,733 - INFO - id: digibok_2008012303001_part0 -2025-05-08 09:11:29,733 - INFO - id: digibok_2009070601009_part0 -2025-05-08 09:11:29,733 - INFO - id: digibok_2009091403018 -2025-05-08 09:11:29,733 - INFO - id: digibok_2006082200020 -2025-05-08 09:11:29,735 - INFO - id: digibok_2006113000046_part0 -2025-05-08 09:11:29,736 - INFO - id: digibok_2007042712001_part0 -2025-05-08 09:11:29,736 - INFO - id: digibok_2011050604113_part0 -2025-05-08 09:11:29,737 - INFO - id: digibok_2010070705001 -2025-05-08 09:11:29,737 - INFO - id: digibok_2007070403001_part0 -2025-05-08 09:11:29,737 - INFO - id: digibok_2007072312003 -2025-05-08 09:11:29,739 - INFO - id: digibok_2006083000067_part0 -2025-05-08 09:11:29,740 - INFO - id: digibok_2014071606003 -2025-05-08 09:11:29,740 - INFO - id: digibok_2009022603020_part0 -2025-05-08 09:11:29,740 - INFO - id: digibok_2009050503004 -2025-05-08 09:11:29,740 - INFO - id: digibok_2010042106089 -2025-05-08 09:11:29,740 - INFO - id: digibok_2006112300012 -2025-05-08 09:11:29,740 - INFO - id: digibok_2008050800076 -2025-05-08 09:11:29,742 - INFO - id: digibok_2006082800058 -2025-05-08 09:11:29,743 - INFO - id: digibok_2007072702001 -2025-05-08 09:11:29,743 - INFO - id: digibok_2006112900015 -2025-05-08 09:11:29,743 - INFO - id: digibok_2008111700103 -2025-05-08 09:11:29,743 - INFO - id: digibok_2017100548132 -2025-05-08 09:11:29,743 - INFO - id: digibok_2010021513001 -2025-05-08 09:11:29,743 - INFO - id: digibok_2008022710007_part1 -2025-05-08 09:11:29,743 - INFO - id: digibok_2010022203001 -2025-05-08 09:11:29,743 - INFO - id: digibok_2006113001027 -2025-05-08 09:11:29,743 - INFO - id: digibok_2006111701031 -2025-05-08 09:11:29,743 - INFO - id: 
digibok_2008090503002_part1 -2025-05-08 09:11:29,743 - INFO - id: digibok_2006112300023 -2025-05-08 09:11:29,747 - INFO - id: digibok_2010111208080_part0 -2025-05-08 09:11:29,747 - INFO - id: digibok_2008040104074 -2025-05-08 09:11:29,747 - INFO - id: digibok_2006081000022 -2025-05-08 09:11:29,748 - INFO - id: digibok_2010053106087_part0 -2025-05-08 09:11:29,748 - INFO - id: digibok_2006120101074_part0 -2025-05-08 09:11:29,748 - INFO - id: digibok_2008040400005_part1 -2025-05-08 09:11:29,748 - INFO - id: digibok_2008040204058 -2025-05-08 09:11:29,748 - INFO - id: digibok_2007060410002 -2025-05-08 09:11:29,748 - INFO - id: digibok_2009071703006 -2025-05-08 09:11:29,748 - INFO - id: digibok_2006120800065_part0 -2025-05-08 09:11:29,748 - INFO - id: digibok_2010052806029_part2 -2025-05-08 09:11:29,748 - INFO - id: digibok_2008040300055 -2025-05-08 09:11:29,748 - INFO - id: digibok_2009061503004_part0 -2025-05-08 09:11:29,751 - INFO - id: digibok_2006120100001 -2025-05-08 09:11:29,752 - INFO - id: digibok_2019121748008 -2025-05-08 09:11:29,753 - INFO - id: digibok_2006120500027 -2025-05-08 09:11:29,753 - INFO - id: digibok_2008031812001_part0 -2025-05-08 09:11:29,754 - INFO - id: digibok_2008040200088 -2025-05-08 09:11:29,754 - INFO - id: digibok_2006112300061 -2025-05-08 09:11:29,754 - INFO - id: digibok_2006080900029_part0 -2025-05-08 09:11:29,754 - INFO - id: digibok_2009021603006_part0 -2025-05-08 09:11:29,755 - INFO - id: digibok_2010070205080 -2025-05-08 09:11:29,756 - INFO - id: digibok_2009092303053_part2 -2025-05-08 09:11:29,756 - INFO - id: digibok_2009091503033_part0 -2025-05-08 09:11:29,756 - INFO - id: digibok_2006111501033 -2025-05-08 09:11:29,756 - INFO - id: digibok_2008012410001 -2025-05-08 09:11:29,756 - INFO - id: digibok_2006120801017 -2025-05-08 09:11:29,756 - INFO - id: digibok_2006112900013_part0 -2025-05-08 09:11:29,759 - INFO - id: digibok_2006111401047_part0 -2025-05-08 09:11:29,759 - INFO - id: digibok_2006111500058_part0 -2025-05-08 09:11:29,760 - INFO - id: digibok_2006112401006 -2025-05-08 09:11:29,760 - INFO - id: digibok_2008102300057 -2025-05-08 09:11:29,760 - INFO - id: digibok_2010021812002 -2025-05-08 09:11:29,760 - INFO - id: digibok_2007073103001_part0 -2025-05-08 09:11:29,760 - INFO - id: digibok_2006082200023_part0 -2025-05-08 09:11:29,760 - INFO - id: digibok_2009080404032 -2025-05-08 09:11:29,760 - INFO - id: digibok_2008020110006_part0 -2025-05-08 09:11:29,760 - INFO - id: digibok_2010061506028_part0 -2025-05-08 09:11:29,760 - INFO - id: digibok_2006120600082 -2025-05-08 09:11:29,763 - INFO - id: digibok_2008040912002_part0 -2025-05-08 09:11:29,763 - INFO - id: digibok_2007081002003 -2025-05-08 09:11:29,763 - INFO - id: digibok_2016010408004 -2025-05-08 09:11:29,763 - INFO - id: digibok_2009091403016_part0 -2025-05-08 09:11:29,763 - INFO - id: digibok_2010052706047_part0 -2025-05-08 09:11:29,763 - INFO - id: digibok_2009041412001 -2025-05-08 09:11:29,763 - INFO - id: digibok_2014120108068 -2025-05-08 09:11:29,763 - INFO - id: digibok_2009082503028 -2025-05-08 09:11:29,763 - INFO - id: digibok_2014042338013 -2025-05-08 09:11:29,763 - INFO - id: digibok_2009061903026_part0 -2025-05-08 09:11:29,763 - INFO - id: digibok_2009022603020_part1 -2025-05-08 09:11:29,763 - INFO - id: digibok_2008040300053 -2025-05-08 09:11:29,763 - INFO - id: digibok_2006112300047_part0 -2025-05-08 09:11:29,763 - INFO - id: digibok_2007011101055_part0 -2025-05-08 09:11:29,769 - INFO - id: digibok_2010092803020 -2025-05-08 09:11:29,770 - INFO - id: digibok_2011050604039 
-[... log truncated: several hundred further lines of the form "2025-05-08 09:11:XX - INFO - id: digibok_..." listing the ids of processed documents ...]
diff --git a/data/ncc_books/ncc_books.md b/data/ncc_books/ncc_books.md deleted file mode 100644 index aca2b4d055871f1f64f0e5a92a6fcf281beea3db..0000000000000000000000000000000000000000 --- a/data/ncc_books/ncc_books.md +++ /dev/null @@ -1,147 +0,0 @@ ---- -pretty_name: Norwegian Colossal Corpus (books) -language: -- da -license: cc0-1.0 -license_name: CC-0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Books ---- - -# Dataset Card for Norwegian Colossal Corpus (books) - - -Danish books extracted from the [Norwegian Colossal Corpus](https://huggingface.co/datasets/NbAiLab/NCC), derived from OCR. - - -The Norwegian Colossal Corpus is a collection of multiple smaller Norwegian corpora suitable for training large language models. - - -## Dataset Description - - -- **Number of samples**: 4.90K -- **Number of tokens (Llama 3)**: 531.97M -- **Average document length in tokens (min, max)**: 108.52K (58, 383.51K) - - - -## Dataset Structure -An example from the dataset looks as follows. - -```py -{ - "id": "digibok_2009033103031", - "text": "P. FR. RIST. OLAF RYES SAGA. OPTEGNELSER, DAGBØGER OG BREVE. DET NORDISKE FORLAG. Denne Bog søger at[...]", - "source": "ncc_books", - "added": "2025-05-08", - "created": "1899-01-01, 1899-12-31", - "token_count": 192301 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): The date when the document was added to this collection. -- `created` (`str`): The date range in which the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer. - - - - -### Dataset Statistics - - -[figure: distribution of document lengths omitted]
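-A minimal sketch for loading this subset; the repository id comes from this project's Hugging Face page, and filtering on the documented `source` field avoids assuming a subset-level config name: - -```py -from datasets import load_dataset - -# Load the full Danish Dynaword corpus (pass streaming=True to avoid a full download). -ds = load_dataset("danish-foundation-models/danish-dynaword", split="train") - -# Keep only the documents described by this datasheet, via the documented `source` field. -ncc_books = ds.filter(lambda row: row["source"] == "ncc_books") -print(len(ncc_books), ncc_books[0]["id"]) -```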
- - - - -## Additional Information - -## License Information - -This dataset is licensed under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/). -This license is derived from the original [publication](https://huggingface.co/datasets/NbAiLab/NCC), which is published by the -[National Library of Norway](https://www.nb.no/en/). - - -## Filtering - -This subset is the result of applying the following filters to all available data splits of the [NCC](https://huggingface.co/datasets/NbAiLab/NCC): - -- is_books: Documents tagged as books -- language_filter: The document is classified as Danish with a confidence of at least 0.75 -- min_length: The document has at least 10 words (whitespace-separated strings plus punctuation) -- alpha_ratio: At least 0.7 of the document's words consist solely of alphabetical characters -- min_stop_words: The document contains at least 2 Danish stop words -- duplicate: Duplicate documents were removed - -The effect of each of these steps is outlined in the table below: - -| Filtering step | Number of documents |
| --------------- | ------------------- | -| is_books | 20 939 | -| language_filter | 5 125 | -| min_length | 5 125 | -| alpha_ratio | 4 902 | -| min_stop_words | 4 902 | -| duplicate | 4 902 | - - -## Quality - -It is important to note that recurring [OCR](https://en.wikipedia.org/wiki/Optical_character_recognition) errors and historic expressions in older -texts hinder the legibility of some of the documents and make differentiating between Norwegian and Danish difficult. - -### Citation Information - -If you use this source, please cite the following articles: - -``` -@inproceedings{kummervold-etal-2022-norwegian-colossal, - title = {The {N}orwegian colossal corpus: A text corpus for training large {N}orwegian language models}, - author = {Kummervold, Per E and - Wetjen, Freddy and - De la Rosa, Javier}, - booktitle = {Proceedings of the Thirteenth Language Resources and Evaluation Conference (LREC)}, - year = {2022}, - address = {Marseille, France}, - publisher = {European Language Resources Association}, - url = {https://aclanthology.org/2022.lrec-1.410}, - pages = {3852--3860}, - abstract = {Norwegian has been one of many languages lacking sufficient available text to train quality language models. In an attempt to bridge this gap, we introduce the Norwegian Colossal Corpus (NCC), which comprises 49GB of clean Norwegian textual data containing over 7B words. The NCC is composed of different and varied sources, ranging from books and newspapers to government documents and public reports, showcasing the various uses of the Norwegian language in society. The corpus contains mainly Norwegian Bokmål and Norwegian Nynorsk. Each document in the corpus is tagged with metadata that enables the creation of sub-corpora for specific needs. Its structure makes it easy to combine with large web archives that for licensing reasons could not be distributed together with the NCC.
By releasing this corpus openly to the public, we hope to foster the creation of both better Norwegian language models and multilingual language models with support for Norwegian.}, -} - -@inproceedings{kummervold-etal-2021-operationalizing, - title = {Operationalizing a National Digital Library: The Case for a {N}orwegian Transformer Model}, - author = {Kummervold, Per E and - De la Rosa, Javier and - Wetjen, Freddy and - Brygfjeld, Svein Arne}, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa)}, - year = {2021}, - address = {Reykjavik, Iceland (Online)}, - publisher = {Linköping University Electronic Press, Sweden}, - url = {https://aclanthology.org/2021.nodalida-main.3}, - pages = {20--29}, - abstract = {In this work, we show the process of building a large-scale training set from digital and digitized collections at a national library. - The resulting Bidirectional Encoder Representations from Transformers (BERT)-based language model for Norwegian outperforms multilingual BERT (mBERT) models - in several token and sequence classification tasks for both Norwegian Bokmål and Norwegian Nynorsk. Our model also improves the mBERT performance for other - languages present in the corpus such as English, Swedish, and Danish. For languages not included in the corpus, the weights degrade moderately while keeping strong multilingual properties. Therefore, - we show that building high-quality models within a memory institution using somewhat noisy optical character recognition (OCR) content is feasible, and we hope to pave the way for other memory institutions to follow.}, -} - -``` diff --git a/data/ncc_books/ncc_books.parquet b/data/ncc_books/ncc_books.parquet deleted file mode 100644 index 527368df890adae78d4f5d91aadde4d50deb871a..0000000000000000000000000000000000000000 --- a/data/ncc_books/ncc_books.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4b3f2cc7fbfc6fdf31ee760d65a38f489846b89c22ea87ba4b444c9ebfe56bd1 -size 978761307 diff --git a/data/ncc_maalfrid/create.py b/data/ncc_maalfrid/create.py deleted file mode 100644 index aa41acaf5fa697816925be16a1ed3175ca66627a..0000000000000000000000000000000000000000 --- a/data/ncc_maalfrid/create.py +++ /dev/null @@ -1,330 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0" -# ] -# /// - -import inspect -import logging -import re -from collections import defaultdict -from collections.abc import Callable -from datetime import datetime -from pathlib import Path - -import pandas as pd -from datasets import Dataset, load_dataset - -logger = logging.getLogger(__name__) -########## edit manually for each source -hf_path = "NbAiLab/NCC" -source = "ncc_maalfrid" -doc_type_searchword = "maalfrid" -license = "other" -domain = "Legal" -num_proc = 10 -########## -today = datetime.now().strftime("%Y-%m-%d") - -# stop words taken from spaCy -# https://github.com/explosion/spaCy/blob/master/spacy/lang/da/stop_words.py -# Source: Handpicked by Jens Dahl Møllerhøj. 
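-# The stop word set below feeds stop_word_filter further down: -# count_min_target(word_tokenize(text), spacy_sw, 2) keeps a document only if it -# contains at least two of these Danish stop words.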
-spacy_sw = set( - """ -af aldrig alene alle allerede alligevel alt altid anden andet andre at - -bag begge blandt blev blive bliver burde bør - -da de dem den denne dens der derefter deres derfor derfra deri dermed derpå derved det dette dig din dine disse dog du - -efter egen eller ellers en end endnu ene eneste enhver ens enten er et - -flere flest fleste for foran fordi forrige fra få før først - -gennem gjorde gjort god gør gøre gørende - -ham han hans har havde have hel heller hen hende hendes henover her herefter heri hermed herpå hun hvad hvem hver hvilke hvilken hvilkes hvis hvor hvordan hvorefter hvorfor hvorfra hvorhen hvori hvorimod hvornår hvorved - -i igen igennem ikke imellem imens imod ind indtil ingen intet - -jeg jer jeres jo - -kan kom kommer kun kunne - -lad langs lav lave lavet lidt lige ligesom lille længere - -man mange med meget mellem men mens mere mest mig min mindre mindst mine mit må måske - -ned nemlig nogen nogensinde noget nogle nok nu ny nyt nær næste næsten - -og også om omkring op os over overalt - -på - -samme sammen selv selvom senere ses siden sig sige skal skulle som stadig synes syntes så sådan således - -temmelig tidligere til tilbage tit - -ud uden udover under undtagen - -var ved vi via vil ville vore vores vær være været - -øvrigt -""".split() -) - - -def word_tokenize(text: str) -> list[str]: - """ - Tokenizes a string into words, splitting on whitespace and punctuation. - - Example: - >>> word_tokenize("Hello, world!") - ['Hello', ',', 'world', '!'] - >>> word_tokenize("This is a test.") - ['This', 'is', 'a', 'test', '.'] - >>> word_tokenize("Many spaces between words.") - ['Many', 'spaces', 'between', 'words', '.'] - """ - - punkt = [",", ".", "!", "?", ":", ";", "(", ")", "[", "]", "{", "}", '"', "'"] - for p in punkt: - text = text.replace(p, f" {p} ") - return text.split() - - -def alpha_ratio(text: str | list[str]) -> float: - """ - If not split already to words, splits text with word_tokenize() - Calculates ratio of words with only alphabetical characters - - """ - if type(text) is str: - text = word_tokenize(text) - else: - pass - - alpha_ratio = 1 - sum(not word.isalpha() for word in text) / len(text) - - return alpha_ratio - - -def count_min_target(given_list: list, target_list: list, min: int) -> bool: - """ - Iterates through given list, until at least min items match any items from target list - - """ - c_item = 0 - given_list_iter = iter(given_list) - while c_item < min: - try: - current_item = next(given_list_iter) - if current_item in target_list: - c_item += 1 - except StopIteration: - break - - return c_item == min - - -def dynaword_format( - meta_document: dict[str, str | int], -) -> dict[str, str | dict[str, str]]: - """Reformats data to fit dynaword standards""" - - text = meta_document.get("text") - id = meta_document.get("id") - date = meta_document.get("publish_year") - doc_type = meta_document.get("doc_type") - - newdata = { - "text": text, - "source": source, - "id": id, - "added": today, - "created": f"{date}-01-01, {date}-12-31", - "license": license, - "domain": domain, - "metadata": { - "source-pretty": f"Norwegian Colossal Corpus ({re.sub('ncc_', '', source)})", - "source-type": doc_type, - }, - } - - return newdata - - -def log_pre_filter_lang_data( - lang_metadata: dict[str, dict[str, int]], filtered_ds: Dataset -): - """ - Function for logging changes in a large dataset, - based on the metadata pre filering and the filtered dataset, - used for language filtering - """ - all_docs = 
sum(lang_metadata[source].values()) - no_docs = lang_metadata[source].get("no") - da_docs = lang_metadata[source].get("da") - no_perc = round(no_docs / all_docs * 100, 4) - da_perc = round(da_docs / all_docs * 100, 4) - - f_length = len(filtered_ds) - f_perc = round(f_length / da_docs * 100, 4) - f_total_perc = round(f_length / all_docs * 100, 4) - - logger.info(f"Documents of {source}:") - logger.info(f"NO: {no_docs}, {no_perc}% ; DA: {da_docs}, {da_perc}%") - logger.info("After language confidence filtering:") - logger.info(f"DA: {f_length}, lost: {100 - f_perc}%") - logger.info("Total document change:") - logger.info(f"{all_docs} -> {f_length}, loss: {100 - f_total_perc}%") - - -def get_var_name(var): - """outputs the variable name""" - callers_local_vars = inspect.currentframe().f_back.f_back.f_back.f_locals.items() - return [var_name for var_name, var_val in callers_local_vars if var_val is var] - - -def filter_with_changelog( - filter_func: Callable[[Dataset], Dataset], dataset: Dataset -) -> Dataset: - """ - Function, which takes a filter and a dataset. - Counts text docs and tokens before and after filtering, - Saves filtering changes to log. - """ - - filter_name = get_var_name(filter_func) - pre_filter_docs = len(dataset) - pre_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - - dataset = dataset.filter(filter_func, num_proc=num_proc) - - post_filter_docs = len(dataset) - post_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - tokens_removed = round((1 - (post_filter_tokens / pre_filter_tokens)) * 100, 2) - docs_removed = round((1 - (post_filter_docs / pre_filter_docs)) * 100, 2) - - logger.info(f"FILTER: {filter_name}") - logger.info( - f"TOKENS: pre: {pre_filter_tokens}, post: {post_filter_tokens}, loss: {tokens_removed}%" - ) - logger.info( - f"DOCUMENTS: pre: {pre_filter_docs}, post: {post_filter_docs}, loss: {docs_removed}%" - ) - - return dataset - - -source_filter = lambda ds: doc_type_searchword in ds["doc_type"] # noqa -length_filter = lambda ds: len(word_tokenize(ds["text"])) >= 10 # noqa -too_long_filter = lambda ds: len(word_tokenize(ds["text"])) > 1e5 # noqa -alpha_filter = lambda ds: alpha_ratio(ds["text"]) >= 0.7 # noqa -stop_word_filter = lambda ds: count_min_target(word_tokenize(ds["text"]), spacy_sw, 2) # noqa - -samples_pr_source: dict = defaultdict(lambda: defaultdict(int)) - - -def language_filter_with_desc_stats(ds: Dataset) -> bool: - """ - Language filtering in a streamed dataset while logging all languages - """ - s = source - language = ds["lang_fasttext"] - samples_pr_source[s][language] += 1 - - language_filter = ( - ds["lang_fasttext"] == "da" and float(ds["lang_fasttext_conf"]) >= 0.75 - ) - - return language_filter - - -def quality_checks(ds: Dataset) -> Dataset: - """ - Quality checks for: - - no duplicate ids - - no duplicate texts - - logs texts > 1e5 tokens - """ - # convert to pandas for the drop_duplicates() - df = pd.DataFrame(ds) - # remove duplicate ids - len_df = len(df) - df = df.drop_duplicates(subset=["id"]) - logger.info(f"Removed {len_df - len(df)} duplicate ids") - # remove rows with duplicate text - len_df = len(df) - df = df.drop_duplicates(subset=["text"]) - logger.info(f"Removed {len_df - len(df)} rows with duplicate text") - # reconvert and remove index - ds_f = Dataset.from_pandas(df, preserve_index=False) - try: - ds_f["__index_level_0__"] - ds_f = ds_f.remove_columns("__index_level_0__") - except KeyError: - pass - - assert len(set(ds_f["id"])) == len(ds_f), "IDs are not unique" - assert 
len(set(ds_f["text"])) == len(ds_f), "Texts are not unique" - - long_texts = ds_f.filter(too_long_filter, num_proc=num_proc) - if len(long_texts["id"]) > 0: - logger.info(f"{len(long_texts['id'])} Long texts (>~1e5 tokens) found") - for id in long_texts["id"]: - logger.info(f"id: {id}") - else: - logger.info("No long texts (>~1e5 tokens) found") - - return ds_f - - -def main(): - # load all splits - logger.info(f"Loading data from: {hf_path}") - danish_data = load_dataset( - hf_path, streaming=False, split="train+validation", num_proc=num_proc - ) - - # filter by metadata - logger.info(f"Processing source: {source}") - danish_data = danish_data.filter(source_filter, num_proc=num_proc) - - logger.info("Processing language") - danish_data = danish_data.filter(language_filter_with_desc_stats, num_proc=None) - - # log language changes - log_pre_filter_lang_data(samples_pr_source, danish_data) - - # convert to dynaword format - danish_data = danish_data.map(dynaword_format) - danish_data = danish_data.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - # filter and log changes - danish_data = filter_with_changelog(length_filter, danish_data) - danish_data = filter_with_changelog(alpha_filter, danish_data) - danish_data = filter_with_changelog(stop_word_filter, danish_data) - - # Quality checks - danish_data = quality_checks(danish_data) - - ### saving - save_path = Path(__file__).parent / f"{source}.parquet" - danish_data.to_parquet(save_path) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / f"{source}.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/ncc_maalfrid/descriptive_stats.json b/data/ncc_maalfrid/descriptive_stats.json deleted file mode 100644 index 48c9cdbf0695dad15b2d70a55d8c40aa9f0ed421..0000000000000000000000000000000000000000 --- a/data/ncc_maalfrid/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 33336, - "number_of_tokens": 29260357, - "min_length_tokens": 12, - "max_length_tokens": 5105, - "number_of_characters": 84621219, - "min_length_characters": 31, - "max_length_characters": 15740 -} \ No newline at end of file diff --git a/data/ncc_maalfrid/images/dist_document_length.png b/data/ncc_maalfrid/images/dist_document_length.png deleted file mode 100644 index 0e98eacbc1bb7be5cf701b26dd28c67d68721b26..0000000000000000000000000000000000000000 --- a/data/ncc_maalfrid/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:de983db4eb01680dced765c484aedf54cecb12e4318d2318fbd032cdee77c4ef -size 561178 diff --git a/data/ncc_maalfrid/ncc_maalfrid.log b/data/ncc_maalfrid/ncc_maalfrid.log deleted file mode 100644 index b3366e61e1b3ce590a715e1a58434713f5cb61ce..0000000000000000000000000000000000000000 --- a/data/ncc_maalfrid/ncc_maalfrid.log +++ /dev/null @@ -1,21 +0,0 @@ -2025-05-08 09:18:42,463 - INFO - Loading data from: NbAiLab/NCC -2025-05-08 09:18:54,476 - INFO - Processing source: ncc_maalfrid -2025-05-08 09:20:23,697 - INFO - Processing language -2025-05-08 09:36:04,572 - INFO - Documents of ncc_maalfrid: -2025-05-08 09:36:04,572 - INFO - NO: 4462910, 66.2608% ; DA: 256659, 3.8106% -2025-05-08 09:36:04,572 - INFO - After language confidence filtering: -2025-05-08 09:36:04,572 - INFO - DA: 51523, lost: 79.9255% -2025-05-08 09:36:04,572 - INFO - Total document 
change:
-2025-05-08 09:36:04,572 - INFO - 6735368 -> 51523, loss: 99.235%
-2025-05-08 09:36:53,427 - INFO - FILTER: ['length_filter']
-2025-05-08 09:36:53,427 - INFO - TOKENS: pre: 21315653, post: 21305735, loss: 0.05%
-2025-05-08 09:36:53,427 - INFO - DOCUMENTS: pre: 51523, post: 49948, loss: 3.06%
-2025-05-08 09:37:37,970 - INFO - FILTER: ['alpha_filter']
-2025-05-08 09:37:37,970 - INFO - TOKENS: pre: 21305735, post: 15216676, loss: 28.58%
-2025-05-08 09:37:37,970 - INFO - DOCUMENTS: pre: 49948, post: 33390, loss: 33.15%
-2025-05-08 09:38:10,796 - INFO - FILTER: ['stop_word_filter']
-2025-05-08 09:38:10,797 - INFO - TOKENS: pre: 15216676, post: 15215917, loss: 0.0%
-2025-05-08 09:38:10,797 - INFO - DOCUMENTS: pre: 33390, post: 33340, loss: 0.15%
-2025-05-08 09:38:20,194 - INFO - Removed 0 duplicate ids
-2025-05-08 09:38:20,297 - INFO - Removed 4 rows with duplicate text
-2025-05-08 09:38:47,894 - INFO - No long texts (>~1e5 tokens) found
diff --git a/data/ncc_maalfrid/ncc_maalfrid.md b/data/ncc_maalfrid/ncc_maalfrid.md
deleted file mode 100644
index 120dd531f3af6399e1eb7ee47825cdc0cc58015f..0000000000000000000000000000000000000000
--- a/data/ncc_maalfrid/ncc_maalfrid.md
+++ /dev/null
@@ -1,136 +0,0 @@
----
-pretty_name: Norwegian Colossal Corpus (maalfrid)
-language:
-- da
-license: other
-license_name: NLOD 2.0
-task_categories:
-- text-generation
-- fill-mask
-task_ids:
-- language-modeling
-domains:
-- Web
----
-
-# Dataset Card for Norwegian Colossal Corpus (maalfrid)
-
-
-Danish content from the websites of Norwegian institutions.
-
-
-Documents are derived from the [Målfrid collection](https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-69/) as a subsection of the [Norwegian Colossal Corpus](https://huggingface.co/datasets/NbAiLab/NCC), which is a collection of multiple smaller Norwegian corpora suitable for training large language models.
-
-## Dataset Description
-
-
-- **Number of samples**: 33.34K
-- **Number of tokens (Llama 3)**: 29.26M
-- **Average document length in tokens (min, max)**: 877.74 (12, 5.11K)
-
-
-
-## Dataset Structure
-An example from the dataset looks as follows.
-
-```py
-{
-    "id": "maalfrid_56267641f4d6de44ab69875a31634e31e68db1a8_166",
-    "text": "Anno 1815, Torsdagen den 5te Octbr. blev i Sagen Fuldmægtig Engebrethsen contra Snedkermester Hansen[...]",
-    "source": "ncc_maalfrid",
-    "added": "2025-05-08",
-    "created": "2021-01-01, 2021-12-31",
-    "token_count": 742
-}
-```
-
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): The date when the document was added to this collection.
-- `created` (`str`): The date range within which the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 tokenizer.
-
-
-
-
-### Dataset Statistics
-
-
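-To inspect the subset behind these statistics, its rows can be pulled out of the combined Dynaword dataset by their `source` value; a minimal sketch, assuming the `datasets` library and that the published dataset id is `danish-foundation-models/danish-dynaword`:
-
-```py
-from datasets import load_dataset
-
-# Load the combined corpus and keep only the maalfrid rows.
-ds = load_dataset("danish-foundation-models/danish-dynaword", split="train")
-maalfrid = ds.filter(lambda row: row["source"] == "ncc_maalfrid")
-
-print(len(maalfrid))  # should match the number of samples reported above
-```
-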

- -

-
-
-## Additional Information
-
-## License Information
-
-This dataset is licensed under [NLOD 2.0](https://data.norge.no/nlod/en/2.0).
-This license is derived from the original [publication](https://huggingface.co/datasets/NbAiLab/NCC), which is published by the
-[National Library of Norway](https://www.nb.no/en/).
-
-## Filtering
-
-This subset is the result of the following filtering steps, applied to all available data splits of the [NCC](https://huggingface.co/datasets/NbAiLab/NCC):
-
-- is_maalfrid: The document is tagged as part of the Målfrid corpus
-- language_filter: The document is classified as Danish with a confidence threshold of 0.75
-- min_length: The document has at least 10 words (whitespace-separated strings and punctuation)
-- alpha_ratio: The ratio of words consisting only of alphabetical characters to all words is at least 0.7
-- min_stop_words: The document contains at least 2 Danish stop words
-- duplicate: Duplicate documents were removed
-
-The effect of each of these steps is outlined in the table below:
-
-| Filtering step  | Number of documents |
-| --------------- | ------------------- |
-| is_maalfrid     | 4 719 569           |
-| language_filter | 51 523              |
-| min_length      | 49 948              |
-| alpha_ratio     | 33 390              |
-| min_stop_words  | 33 340              |
-| duplicate       | 33 336              |
-
-
-### Citation Information
-
-If you use this source, please cite the following articles:
-
-```
-@inproceedings{kummervold-etal-2022-norwegian-colossal,
-    title = {The {N}orwegian colossal corpus: A text corpus for training large {N}orwegian language models},
-    author = {Kummervold, Per E and
-      Wetjen, Freddy and
-      De la Rosa, Javier},
-    booktitle = {Proceedings of the Thirteenth Language Resources and Evaluation Conference (LREC)},
-    year = {2022},
-    address = {Marseille, France},
-    publisher = {European Language Resources Association},
-    url = {https://aclanthology.org/2022.lrec-1.410},
-    pages = {3852--3860},
-    abstract = {Norwegian has been one of many languages lacking sufficient available text to train quality language models. In an attempt to bridge this gap, we introduce the Norwegian Colossal Corpus (NCC), which comprises 49GB of clean Norwegian textual data containing over 7B words. The NCC is composed of different and varied sources, ranging from books and newspapers to government documents and public reports, showcasing the various uses of the Norwegian language in society. The corpus contains mainly Norwegian Bokmål and Norwegian Nynorsk. Each document in the corpus is tagged with metadata that enables the creation of sub-corpora for specific needs. Its structure makes it easy to combine with large web archives that for licensing reasons could not be distributed together with the NCC. By releasing this corpus openly to the public, we hope to foster the creation of both better Norwegian language models and multilingual language models with support for Norwegian.},
-}
-
-@inproceedings{kummervold-etal-2021-operationalizing,
-    title = {Operationalizing a National Digital Library: The Case for a {N}orwegian Transformer Model},
-    author = {Kummervold, Per E and
-      De la Rosa, Javier and
-      Wetjen, Freddy and
-      Brygfjeld, Svein Arne},
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa)},
-    year = {2021},
-    address = {Reykjavik, Iceland (Online)},
-    publisher = {Linköping University Electronic Press, Sweden},
-    url = {https://aclanthology.org/2021.nodalida-main.3},
-    pages = {20--29},
-    abstract = {In this work, we show the process of building a large-scale training set from digital and digitized collections at a national library. 
- The resulting Bidirectional Encoder Representations from Transformers (BERT)-based language model for Norwegian outperforms multilingual BERT (mBERT) models - in several token and sequence classification tasks for both Norwegian Bokmål and Norwegian Nynorsk. Our model also improves the mBERT performance for other - languages present in the corpus such as English, Swedish, and Danish. For languages not included in the corpus, the weights degrade moderately while keeping strong multilingual properties. Therefore, - we show that building high-quality models within a memory institution using somewhat noisy optical character recognition (OCR) content is feasible, and we hope to pave the way for other memory institutions to follow.}, -} - -``` diff --git a/data/ncc_maalfrid/ncc_maalfrid.parquet b/data/ncc_maalfrid/ncc_maalfrid.parquet deleted file mode 100644 index 0f6f5516e476f00299561622eddf67bc2c50d871..0000000000000000000000000000000000000000 --- a/data/ncc_maalfrid/ncc_maalfrid.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:61c5d07f6634386872cc94aa6e2f670bb5d928364360fa934565c35b8fd1ba5d -size 54970623 diff --git a/data/ncc_newspaper/create.py b/data/ncc_newspaper/create.py deleted file mode 100644 index b4021179bce55048e5905f9468954598b1505710..0000000000000000000000000000000000000000 --- a/data/ncc_newspaper/create.py +++ /dev/null @@ -1,345 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0" -# ] -# /// -# setup -import logging -import re -import inspect - -from pathlib import Path -from datetime import datetime -from collections import defaultdict -from collections.abc import Callable - -import pandas as pd -from datasets import Dataset, load_dataset - -logger = logging.getLogger(__name__) -########## edit manually for each source -hf_path = "NbAiLab/NCC" -source = "ncc_newspaper" -license = "cc0-1.0" -domain = "News" -num_proc = 8 -########## -today = datetime.now().strftime("%Y-%m-%d") - -# stop words taken from spaCy -# https://github.com/explosion/spaCy/blob/master/spacy/lang/da/stop_words.py -# Source: Handpicked by Jens Dahl Møllerhøj. 
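-# Worked example for the alpha_ratio() filter defined below:
-# word_tokenize("Hello, world!") -> ['Hello', ',', 'world', '!'], of which
-# 2 of 4 tokens are purely alphabetic, so alpha_ratio = 1 - 2/4 = 0.5;
-# such a text would fail the >= 0.7 cut-off of alpha_filter, which is how
-# punctuation- and digit-heavy OCR noise gets dropped.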
-spacy_sw = set( - """ -af aldrig alene alle allerede alligevel alt altid anden andet andre at - -bag begge blandt blev blive bliver burde bør - -da de dem den denne dens der derefter deres derfor derfra deri dermed derpå derved det dette dig din dine disse dog du - -efter egen eller ellers en end endnu ene eneste enhver ens enten er et - -flere flest fleste for foran fordi forrige fra få før først - -gennem gjorde gjort god gør gøre gørende - -ham han hans har havde have hel heller hen hende hendes henover her herefter heri hermed herpå hun hvad hvem hver hvilke hvilken hvilkes hvis hvor hvordan hvorefter hvorfor hvorfra hvorhen hvori hvorimod hvornår hvorved - -i igen igennem ikke imellem imens imod ind indtil ingen intet - -jeg jer jeres jo - -kan kom kommer kun kunne - -lad langs lav lave lavet lidt lige ligesom lille længere - -man mange med meget mellem men mens mere mest mig min mindre mindst mine mit må måske - -ned nemlig nogen nogensinde noget nogle nok nu ny nyt nær næste næsten - -og også om omkring op os over overalt - -på - -samme sammen selv selvom senere ses siden sig sige skal skulle som stadig synes syntes så sådan således - -temmelig tidligere til tilbage tit - -ud uden udover under undtagen - -var ved vi via vil ville vore vores vær være været - -øvrigt -""".split() -) - - -# functions -def word_tokenize(text: str) -> list[str]: - """ - Tokenizes a string into words, splitting on whitespace and punctuation. - - Example: - >>> word_tokenize("Hello, world!") - ['Hello', ',', 'world', '!'] - >>> word_tokenize("This is a test.") - ['This', 'is', 'a', 'test', '.'] - >>> word_tokenize("Many spaces between words.") - ['Many', 'spaces', 'between', 'words', '.'] - """ - - punkt = [",", ".", "!", "?", ":", ";", "(", ")", "[", "]", "{", "}", '"', "'"] - for p in punkt: - text = text.replace(p, f" {p} ") - return text.split() - - -def alpha_ratio(text: str | list[str]) -> float: - """ - If not split already to words, splits text with word_tokenize() - Calculates ratio of words with only alphabetical characters - - """ - if type(text) is str: - text = word_tokenize(text) - else: - pass - - alpha_ratio = 1 - sum(not word.isalpha() for word in text) / len(text) - - return alpha_ratio - - -def count_min_target(given_list: list, target_list: list, min: int) -> bool: - """ - Iterates through given list, until at least min items match any items from target list - - """ - c_item = 0 - given_list_iter = iter(given_list) - while c_item < min: - try: - current_item = next(given_list_iter) - if current_item in target_list: - c_item += 1 - except StopIteration: - break - - return c_item == min - - -def dynaword_format( - meta_document: dict[str, str | int], -) -> dict[str, str | dict[str, str]]: - """Reformats data to fit dynaword standards""" - - text = meta_document.get("text") - id = meta_document.get("id") - date = meta_document.get("publish_year") - doc_type = meta_document.get("doc_type") - - newdata = { - "text": text, - "source": source, - "id": id, - "added": today, - "created": f"{date}-01-01, {date}-12-31", - "license": license, - "domain": domain, - "metadata": { - "source-pretty": f"Norwegian Colossal Corpus ({re.sub('ncc_', '', source)})", - "source-type": doc_type, - }, - } - - return newdata - - -def log_pre_filter_lang_data( - lang_metadata: dict[str, dict[str, int]], filtered_ds: Dataset -): - """ - Function for logging changes in a large dataset, - based on the metadata pre filering and the filtered dataset, - used for language filtering - """ - all_docs = 
sum(lang_metadata[source].values()) - no_docs = lang_metadata[source].get("no") - da_docs = lang_metadata[source].get("da") - no_perc = round(no_docs / all_docs * 100, 4) - da_perc = round(da_docs / all_docs * 100, 4) - - f_length = len(filtered_ds) - f_perc = round(f_length / da_docs * 100, 4) - f_total_perc = round(f_length / all_docs * 100, 4) - - logger.info(f"Documents of {source}:") - logger.info(f"NO: {no_docs}, {no_perc}% ; DA: {da_docs}, {da_perc}%") - logger.info("After language confidence filtering:") - logger.info(f"DA: {f_length}, lost: {100 - f_perc}%") - logger.info("Total document change:") - logger.info(f"{all_docs} -> {f_length}, loss: {100 - f_total_perc}%") - - -def get_var_name(var): - """outputs the variable name""" - callers_local_vars = inspect.currentframe().f_back.f_back.f_back.f_locals.items() - return [var_name for var_name, var_val in callers_local_vars if var_val is var] - - -def filter_with_changelog( - filter_func: Callable[[Dataset], Dataset], dataset: Dataset -) -> Dataset: - """ - Function, which takes a filter and a dataset. - Counts text docs and tokens before and after filtering, - Saves filtering changes to log. - """ - - filter_name = get_var_name(filter_func) - pre_filter_docs = len(dataset) - pre_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - - dataset = dataset.filter(filter_func, num_proc=num_proc) - - post_filter_docs = len(dataset) - post_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - tokens_removed = round((1 - (post_filter_tokens / pre_filter_tokens)) * 100, 2) - docs_removed = round((1 - (post_filter_docs / pre_filter_docs)) * 100, 2) - - logger.info(f"FILTER: {filter_name}") - logger.info( - f"TOKENS: pre: {pre_filter_tokens}, post: {post_filter_tokens}, loss: {tokens_removed}%" - ) - logger.info( - f"DOCUMENTS: pre: {pre_filter_docs}, post: {post_filter_docs}, loss: {docs_removed}%" - ) - - return dataset - - -# filters -source_filter = lambda ds: re.sub("ncc_", "", source) in ds["doc_type"] # noqa -length_filter = lambda ds: len(word_tokenize(ds["text"])) >= 10 # noqa -too_long_filter = lambda ds: len(word_tokenize(ds["text"])) > 1e5 # noqa -alpha_filter = lambda ds: alpha_ratio(ds["text"]) >= 0.7 # noqa -stop_word_filter = lambda ds: count_min_target(word_tokenize(ds["text"]), spacy_sw, 2) # noqa - -samples_pr_source: dict = defaultdict(lambda: defaultdict(int)) - - -def language_filter_with_desc_stats(ds: Dataset) -> bool: - """ - Language filtering in a streamed dataset while logging all languages - """ - s = source - language = ds["lang_fasttext"] - samples_pr_source[s][language] += 1 - - language_filter = ( - ds["lang_fasttext"] == "da" and float(ds["lang_fasttext_conf"]) >= 0.5 - ) - - return language_filter - - -# quality checks -def quality_checks(ds: Dataset) -> Dataset: - """ - Quality checks for: - - no duplicate ids - - no duplicate texts - - logs texts > 1e5 tokens - """ - # convert to pandas for the drop_duplicates() - df = pd.DataFrame(ds) - # remove duplicate ids - len_df = len(df) - df = df.drop_duplicates(subset=["id"]) - logger.info(f"Removed {len_df - len(df)} duplicate ids") - # remove rows with duplicate text - len_df = len(df) - df = df.drop_duplicates(subset=["text"]) - logger.info(f"Removed {len_df - len(df)} rows with duplicate text") - # reconvert and remove index - ds_f = Dataset.from_pandas(df, preserve_index=False) - try: - ds_f["__index_level_0__"] - ds_f = ds_f.remove_columns("__index_level_0__") - except KeyError: - pass - - assert len(set(ds_f["id"])) == 
len(ds_f), "IDs are not unique" - assert len(set(ds_f["text"])) == len(ds_f), "Texts are not unique" - - long_texts = ds_f.filter(too_long_filter, num_proc=num_proc) - if len(long_texts["id"]) > 0: - logger.info(f"{len(long_texts['id'])} Long texts (>~1e5 tokens) found") - for id in long_texts["id"]: - logger.info(f"id: {id}") - else: - logger.info("No long texts (>~1e5 tokens) found") - - return ds_f - - -# main -def main(): - # load all splits - logger.info(f"Loading data from: {hf_path}") - data = load_dataset(hf_path, streaming=True) - data_list = [] - - for split in data: - # filter by metadata - logger.info(f"Processing source: {source}, split: {split}") - s_data = data[split].filter(source_filter) - - logger.info(f"Processing language, split: {split}") - s_data = s_data.filter(language_filter_with_desc_stats) - - # convert from iterable dataset - data_iter = iter(s_data) - while True: - try: - data_list.append(next(data_iter)) - except StopIteration: - break - danish_data = Dataset.from_list(data_list) - del data_list - - # log language changes - log_pre_filter_lang_data(samples_pr_source, danish_data) - - # convert to dynaword format - logger.info("Assembling whole dataset for filtering") - danish_data = danish_data.map(dynaword_format) - danish_data = danish_data.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - # filter and log changes - danish_data = filter_with_changelog(length_filter, danish_data) - danish_data = filter_with_changelog(alpha_filter, danish_data) - danish_data = filter_with_changelog(stop_word_filter, danish_data) - - # Quality checks - danish_data = quality_checks(danish_data) - - ### saving - save_path = Path(__file__).parent / f"{source}.parquet" - danish_data.to_parquet(save_path) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / f"{source}.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/ncc_newspaper/descriptive_stats.json b/data/ncc_newspaper/descriptive_stats.json deleted file mode 100644 index 5a07f0fac78736cb83709ab65c90b2b17a37bc67..0000000000000000000000000000000000000000 --- a/data/ncc_newspaper/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 5373, - "number_of_tokens": 1052890, - "min_length_tokens": 12, - "max_length_tokens": 3851, - "number_of_characters": 3071706, - "min_length_characters": 33, - "max_length_characters": 10689 -} \ No newline at end of file diff --git a/data/ncc_newspaper/images/dist_document_length.png b/data/ncc_newspaper/images/dist_document_length.png deleted file mode 100644 index ea6600468b7bcb0f966629dcf279c9e7a22ab474..0000000000000000000000000000000000000000 --- a/data/ncc_newspaper/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0f2c304bf6092db6c82337cc75af057c1a7a400db6de2ca98bb4f52baebf8a86 -size 556296 diff --git a/data/ncc_newspaper/ncc_newspaper.log b/data/ncc_newspaper/ncc_newspaper.log deleted file mode 100644 index 8a7e53109d54a70611f3e12cfe71cfb7f30b8997..0000000000000000000000000000000000000000 --- a/data/ncc_newspaper/ncc_newspaper.log +++ /dev/null @@ -1,39 +0,0 @@ -2025-05-01 07:46:31,692 - INFO - Loading data from: NbAiLab/NCC -2025-05-01 07:46:35,756 - INFO - Processing source: ncc_newspaper, split: train -2025-05-01 07:46:35,757 - INFO - Processing language, split: train 
-2025-05-01 09:08:21,490 - INFO - Loading data from: NbAiLab/NCC -2025-05-01 09:08:35,451 - INFO - Processing source: ncc_newspaper, split: train -2025-05-01 09:08:35,453 - INFO - Processing language, split: train -2025-05-01 09:51:35,309 - WARNING - '(MaxRetryError('HTTPSConnectionPool(host=\'huggingface.co\', port=443): Max retries exceeded with url: /datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl (Caused by NameResolutionError(": Failed to resolve \'huggingface.co\' ([Errno 11001] getaddrinfo failed)"))'), '(Request ID: 8c7cfa56-5cbe-4113-ae0b-9b9192c59c61)')' thrown while requesting GET https://huggingface.co/datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl -2025-05-01 09:51:35,330 - WARNING - Retrying in 1s [Retry 1/5]. -2025-05-01 09:51:36,342 - WARNING - '(MaxRetryError('HTTPSConnectionPool(host=\'huggingface.co\', port=443): Max retries exceeded with url: /datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl (Caused by NameResolutionError(": Failed to resolve \'huggingface.co\' ([Errno 11001] getaddrinfo failed)"))'), '(Request ID: 8a52f0ab-4507-4af3-9de8-44600dcbe92b)')' thrown while requesting GET https://huggingface.co/datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl -2025-05-01 09:51:36,343 - WARNING - Retrying in 2s [Retry 2/5]. -2025-05-01 09:51:38,346 - WARNING - '(MaxRetryError('HTTPSConnectionPool(host=\'huggingface.co\', port=443): Max retries exceeded with url: /datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl (Caused by NameResolutionError(": Failed to resolve \'huggingface.co\' ([Errno 11001] getaddrinfo failed)"))'), '(Request ID: 7004c6e6-9238-4ae1-9b2c-625361ec2495)')' thrown while requesting GET https://huggingface.co/datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl -2025-05-01 09:51:38,347 - WARNING - Retrying in 4s [Retry 3/5]. -2025-05-01 10:34:26,967 - WARNING - '(MaxRetryError('HTTPSConnectionPool(host=\'huggingface.co\', port=443): Max retries exceeded with url: /datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl (Caused by NameResolutionError(": Failed to resolve \'huggingface.co\' ([Errno 11001] getaddrinfo failed)"))'), '(Request ID: e818f8c8-4815-4b64-95f1-1ea5d68005b7)')' thrown while requesting GET https://huggingface.co/datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl -2025-05-01 10:34:26,976 - WARNING - Retrying in 8s [Retry 4/5]. -2025-05-01 10:34:34,996 - WARNING - '(MaxRetryError('HTTPSConnectionPool(host=\'huggingface.co\', port=443): Max retries exceeded with url: /datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl (Caused by NameResolutionError(": Failed to resolve \'huggingface.co\' ([Errno 11001] getaddrinfo failed)"))'), '(Request ID: 3b7514c2-ff9c-4634-b738-535764ff6b86)')' thrown while requesting GET https://huggingface.co/datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl -2025-05-01 10:34:34,996 - WARNING - Retrying in 8s [Retry 5/5]. 
-2025-05-01 10:34:43,000 - WARNING - '(MaxRetryError('HTTPSConnectionPool(host=\'huggingface.co\', port=443): Max retries exceeded with url: /datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl (Caused by NameResolutionError(": Failed to resolve \'huggingface.co\' ([Errno 11001] getaddrinfo failed)"))'), '(Request ID: 430a9373-6149-47c5-a660-aa1b82df18d3)')' thrown while requesting GET https://huggingface.co/datasets/NbAiLab/NCC/resolve/857a5832b73ef33c66b5674d970777c39d991c0e/data/train-shard-0010-of-0046.jsonl
-2025-05-01 10:34:43,001 - WARNING - Got disconnected from remote data host. Retrying in 5sec [1/20]
-2025-05-01 11:24:41,107 - INFO - Processing source: ncc_newspaper, split: validation
-2025-05-01 11:24:41,121 - INFO - Processing language, split: validation
-2025-05-01 11:26:07,574 - INFO - Documents of ncc_newspaper:
-2025-05-01 11:26:07,575 - INFO - NO: 487086, 73.2081% ; DA: 17516, 2.6326%
-2025-05-01 11:26:07,575 - INFO - After language confidence filtering:
-2025-05-01 11:26:07,577 - INFO - DA: 7632, lost: 56.4284%
-2025-05-01 11:26:07,577 - INFO - Total document change:
-2025-05-01 11:26:07,578 - INFO - 665344 -> 7632, loss: 98.8529%
-2025-05-01 11:26:07,578 - INFO - Assembling whole dataset for filtering
-2025-05-01 11:26:24,562 - INFO - FILTER: ['length_filter']
-2025-05-01 11:26:24,562 - INFO - TOKENS: pre: 669129, post: 661484, loss: 1.14%
-2025-05-01 11:26:24,563 - INFO - DOCUMENTS: pre: 7632, post: 6401, loss: 16.13%
-2025-05-01 11:26:31,510 - INFO - FILTER: ['alpha_filter']
-2025-05-01 11:26:31,511 - INFO - TOKENS: pre: 661484, post: 616869, loss: 6.74%
-2025-05-01 11:26:31,511 - INFO - DOCUMENTS: pre: 6401, post: 5439, loss: 15.03%
-2025-05-01 11:26:37,466 - INFO - FILTER: ['stop_word_filter']
-2025-05-01 11:26:37,467 - INFO - TOKENS: pre: 616869, post: 616059, loss: 0.13%
-2025-05-01 11:26:37,467 - INFO - DOCUMENTS: pre: 5439, post: 5374, loss: 1.2%
-2025-05-01 11:26:38,121 - INFO - Removed 0 duplicate ids
-2025-05-01 11:26:38,129 - INFO - Removed 1 rows with duplicate text
-2025-05-01 11:26:42,145 - INFO - No long texts (>~1e5 tokens) found
diff --git a/data/ncc_newspaper/ncc_newspaper.md b/data/ncc_newspaper/ncc_newspaper.md
deleted file mode 100644
index 7e29e0c255f3e0a92b38e35716193476c161c623..0000000000000000000000000000000000000000
--- a/data/ncc_newspaper/ncc_newspaper.md
+++ /dev/null
@@ -1,142 +0,0 @@
----
-pretty_name: Norwegian Colossal Corpus (newspaper)
-language:
-- da
-license: cc0-1.0
-license_name: CC-0
-task_categories:
-- text-generation
-- fill-mask
-task_ids:
-- language-modeling
-domains:
-- News
---- 
-
-# Dataset Card for Norwegian Colossal Corpus (newspaper)
-
-
-OCR'd newspapers derived from [NCC](https://huggingface.co/datasets/NbAiLab/NCC)
-
-
-The Norwegian Colossal Corpus is a collection of multiple smaller Norwegian corpora suitable for training large language models.
-
-## Dataset Description
-
-
-- **Number of samples**: 5.37K
-- **Number of tokens (Llama 3)**: 1.05M
-- **Average document length in tokens (min, max)**: 195.96 (12, 3.85K)
-
-
-
-## Dataset Structure
-An example from the dataset looks as follows.
-
-```py
-{
-    "id": "fylkestidendeforsognogfjordane_null_null_19410723_69_54_1_MODSMD_ARTICLE5",
-    "text": "STOCKHOLM: Det er kommet melding Ul den svenske turlst forenlng om at de to svenske ljellklatrerne s[...]",
-    "source": "ncc_newspaper",
-    "added": "2025-05-01",
-    "created": "1941-01-01, 1941-12-31",
-    "token_count": 137
-}
-```
-
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): The date when the document was added to this collection.
-- `created` (`str`): The date range within which the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 tokenizer.
-
-
-
-
-### Dataset Statistics
-
-
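-The character-level figures in `descriptive_stats.json` can be re-derived from the subset's parquet file; a minimal sketch, assuming `pandas` with parquet support and the repo-relative path:
-
-```py
-import pandas as pd
-
-# Read the subset as shipped in this repository.
-df = pd.read_parquet("data/ncc_newspaper/ncc_newspaper.parquet")
-
-# Character counts per document; token-level stats would additionally
-# require the Llama 3 tokenizer.
-lengths = df["text"].str.len()
-print(len(df), int(lengths.sum()), int(lengths.min()), int(lengths.max()))
-```
-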

- -

-
-
-## Additional Information
-
-## License Information
-
-This dataset is licensed under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/).
-This license is derived from the original [publication](https://huggingface.co/datasets/NbAiLab/NCC), which is published by the
-[National Library of Norway](https://www.nb.no/en/).
-
-## Filtering
-
-This subset is the result of the following filtering steps, applied to all available data splits of the [NCC](https://huggingface.co/datasets/NbAiLab/NCC):
-
-- is_newspaper: The document is tagged as a newspaper
-- language_filter: The document is classified as Danish with a confidence threshold of 0.5
-- min_length: The document has at least 10 words (whitespace-separated strings and punctuation)
-- alpha_ratio: The ratio of words consisting only of alphabetical characters to all words is at least 0.7
-- min_stop_words: The document contains at least 2 Danish stop words
-- duplicate: Duplicate documents were removed
-
-The effect of each of these steps is outlined in the table below:
-
-| Filtering step  | Number of documents |
-| --------------- | ------------------- |
-| is_newspaper    | 504 602             |
-| language_filter | 7 632               |
-| min_length      | 6 401               |
-| alpha_ratio     | 5 439               |
-| min_stop_words  | 5 374               |
-| duplicate       | 5 373               |
-
-## Quality
-
-It is important to note that recurring [OCR](https://en.wikipedia.org/wiki/Optical_character_recognition) errors and historical expressions in older
-texts hinder the legibility of some of the documents and make differentiating between Norwegian and Danish difficult.
-
-### Citation Information
-
-If you use this source, please cite the following articles:
-
-```
-@inproceedings{kummervold-etal-2022-norwegian-colossal,
-    title = {The {N}orwegian colossal corpus: A text corpus for training large {N}orwegian language models},
-    author = {Kummervold, Per E and
-      Wetjen, Freddy and
-      De la Rosa, Javier},
-    booktitle = {Proceedings of the Thirteenth Language Resources and Evaluation Conference (LREC)},
-    year = {2022},
-    address = {Marseille, France},
-    publisher = {European Language Resources Association},
-    url = {https://aclanthology.org/2022.lrec-1.410},
-    pages = {3852--3860},
-    abstract = {Norwegian has been one of many languages lacking sufficient available text to train quality language models. In an attempt to bridge this gap, we introduce the Norwegian Colossal Corpus (NCC), which comprises 49GB of clean Norwegian textual data containing over 7B words. The NCC is composed of different and varied sources, ranging from books and newspapers to government documents and public reports, showcasing the various uses of the Norwegian language in society. The corpus contains mainly Norwegian Bokmål and Norwegian Nynorsk. Each document in the corpus is tagged with metadata that enables the creation of sub-corpora for specific needs. Its structure makes it easy to combine with large web archives that for licensing reasons could not be distributed together with the NCC. 
By releasing this corpus openly to the public, we hope to foster the creation of both better Norwegian language models and multilingual language models with support for Norwegian.}, -} - -@inproceedings{kummervold-etal-2021-operationalizing, - title = {Operationalizing a National Digital Library: The Case for a {N}orwegian Transformer Model}, - author = {Kummervold, Per E and - De la Rosa, Javier and - Wetjen, Freddy and - Brygfjeld, Svein Arne}, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa)}, - year = {2021}, - address = {Reykjavik, Iceland (Online)}, - publisher = {Linköping University Electronic Press, Sweden}, - url = {https://aclanthology.org/2021.nodalida-main.3}, - pages = {20--29}, - abstract = {In this work, we show the process of building a large-scale training set from digital and digitized collections at a national library. - The resulting Bidirectional Encoder Representations from Transformers (BERT)-based language model for Norwegian outperforms multilingual BERT (mBERT) models - in several token and sequence classification tasks for both Norwegian Bokmål and Norwegian Nynorsk. Our model also improves the mBERT performance for other - languages present in the corpus such as English, Swedish, and Danish. For languages not included in the corpus, the weights degrade moderately while keeping strong multilingual properties. Therefore, - we show that building high-quality models within a memory institution using somewhat noisy optical character recognition (OCR) content is feasible, and we hope to pave the way for other memory institutions to follow.}, -} - -``` diff --git a/data/ncc_newspaper/ncc_newspaper.parquet b/data/ncc_newspaper/ncc_newspaper.parquet deleted file mode 100644 index e16d847aa0611a5360bb1912055ded5167f0e21b..0000000000000000000000000000000000000000 --- a/data/ncc_newspaper/ncc_newspaper.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6876f892b2ce4e0ca59fc19d6742547fb909edfe28e33466ad172420770311d2 -size 2419112 diff --git a/data/ncc_parliament/create.py b/data/ncc_parliament/create.py deleted file mode 100644 index bfeb2c6d66aab62b7a60c70d0465011d5761a5af..0000000000000000000000000000000000000000 --- a/data/ncc_parliament/create.py +++ /dev/null @@ -1,332 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets>=3.2.0" -# ] -# /// - -import inspect -import logging -import re -from collections import defaultdict -from collections.abc import Callable -from datetime import datetime -from pathlib import Path - -import pandas as pd -from datasets import Dataset, load_dataset - -logger = logging.getLogger(__name__) -########## edit manually for each source -hf_path = "NbAiLab/NCC" -source = "ncc_parliament" -doc_type_searchword = "parliament" -license = "other" -domain = "Legal" -num_proc = 10 -########## -today = datetime.now().strftime("%Y-%m-%d") - -# stop words taken from spaCy -# https://github.com/explosion/spaCy/blob/master/spacy/lang/da/stop_words.py -# Source: Handpicked by Jens Dahl Møllerhøj. 
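-# Note: language_filter_with_desc_stats() below does double duty: it
-# tallies every fastText language label into samples_pr_source (which is
-# why main() runs it with num_proc=None, keeping the counts in a single
-# process) and keeps only rows labelled "da" with confidence >= 0.75;
-# log_pre_filter_lang_data() then reports the resulting losses.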
-spacy_sw = set( - """ -af aldrig alene alle allerede alligevel alt altid anden andet andre at - -bag begge blandt blev blive bliver burde bør - -da de dem den denne dens der derefter deres derfor derfra deri dermed derpå derved det dette dig din dine disse dog du - -efter egen eller ellers en end endnu ene eneste enhver ens enten er et - -flere flest fleste for foran fordi forrige fra få før først - -gennem gjorde gjort god gør gøre gørende - -ham han hans har havde have hel heller hen hende hendes henover her herefter heri hermed herpå hun hvad hvem hver hvilke hvilken hvilkes hvis hvor hvordan hvorefter hvorfor hvorfra hvorhen hvori hvorimod hvornår hvorved - -i igen igennem ikke imellem imens imod ind indtil ingen intet - -jeg jer jeres jo - -kan kom kommer kun kunne - -lad langs lav lave lavet lidt lige ligesom lille længere - -man mange med meget mellem men mens mere mest mig min mindre mindst mine mit må måske - -ned nemlig nogen nogensinde noget nogle nok nu ny nyt nær næste næsten - -og også om omkring op os over overalt - -på - -samme sammen selv selvom senere ses siden sig sige skal skulle som stadig synes syntes så sådan således - -temmelig tidligere til tilbage tit - -ud uden udover under undtagen - -var ved vi via vil ville vore vores vær være været - -øvrigt -""".split() -) - - -def word_tokenize(text: str) -> list[str]: - """ - Tokenizes a string into words, splitting on whitespace and punctuation. - - Example: - >>> word_tokenize("Hello, world!") - ['Hello', ',', 'world', '!'] - >>> word_tokenize("This is a test.") - ['This', 'is', 'a', 'test', '.'] - >>> word_tokenize("Many spaces between words.") - ['Many', 'spaces', 'between', 'words', '.'] - """ - - punkt = [",", ".", "!", "?", ":", ";", "(", ")", "[", "]", "{", "}", '"', "'"] - for p in punkt: - text = text.replace(p, f" {p} ") - return text.split() - - -def alpha_ratio(text: str | list[str]) -> float: - """ - If not split already to words, splits text with word_tokenize() - Calculates ratio of words with only alphabetical characters - - """ - if type(text) is str: - text = word_tokenize(text) - else: - pass - - alpha_ratio = 1 - sum(not word.isalpha() for word in text) / len(text) - - return alpha_ratio - - -def count_min_target(given_list: list, target_list: list, min: int) -> bool: - """ - Iterates through given list, until at least min items match any items from target list - - """ - c_item = 0 - given_list_iter = iter(given_list) - while c_item < min: - try: - current_item = next(given_list_iter) - if current_item in target_list: - c_item += 1 - except StopIteration: - break - - return c_item == min - - -def dynaword_format( - meta_document: dict[str, str | int], -) -> dict[str, str | dict[str, str]]: - """Reformats data to fit dynaword standards""" - - text = meta_document.get("text") - id = meta_document.get("id") - date = meta_document.get("publish_year") - doc_type = meta_document.get("doc_type") - - newdata = { - "text": text, - "source": source, - "id": id, - "added": today, - "created": f"{date}-01-01, {date}-12-31", - "license": license, - "domain": domain, - "metadata": { - "source-pretty": f"Norwegian Colossal Corpus ({re.sub('ncc_', '', source)})", - "source-type": doc_type, - }, - } - - return newdata - - -def log_pre_filter_lang_data( - lang_metadata: dict[str, dict[str, int]], filtered_ds: Dataset -): - """ - Function for logging changes in a large dataset, - based on the metadata pre filering and the filtered dataset, - used for language filtering - """ - all_docs = 
sum(lang_metadata[source].values()) - no_docs = lang_metadata[source].get("no") - da_docs = lang_metadata[source].get("da") - no_perc = round(no_docs / all_docs * 100, 4) - da_perc = round(da_docs / all_docs * 100, 4) - - f_length = len(filtered_ds) - f_perc = round(f_length / da_docs * 100, 4) - f_total_perc = round(f_length / all_docs * 100, 4) - - logger.info(f"Documents of {source}:") - logger.info(f"NO: {no_docs}, {no_perc}% ; DA: {da_docs}, {da_perc}%") - logger.info("After language confidence filtering:") - logger.info(f"DA: {f_length}, lost: {100 - f_perc}%") - logger.info("Total document change:") - logger.info(f"{all_docs} -> {f_length}, loss: {100 - f_total_perc}%") - - -def get_var_name(var): - """outputs the variable name""" - callers_local_vars = inspect.currentframe().f_back.f_back.f_back.f_locals.items() - return [var_name for var_name, var_val in callers_local_vars if var_val is var] - - -def filter_with_changelog( - filter_func: Callable[[Dataset], Dataset], dataset: Dataset -) -> Dataset: - """ - Function, which takes a filter and a dataset. - Counts text docs and tokens before and after filtering, - Saves filtering changes to log. - """ - - filter_name = get_var_name(filter_func) - pre_filter_docs = len(dataset) - pre_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - - dataset = dataset.filter(filter_func, num_proc=num_proc) - - post_filter_docs = len(dataset) - post_filter_tokens = sum(len(word_tokenize(i["text"])) for i in dataset) - tokens_removed = round((1 - (post_filter_tokens / pre_filter_tokens)) * 100, 2) - docs_removed = round((1 - (post_filter_docs / pre_filter_docs)) * 100, 2) - - logger.info(f"FILTER: {filter_name}") - logger.info( - f"TOKENS: pre: {pre_filter_tokens}, post: {post_filter_tokens}, loss: {tokens_removed}%" - ) - logger.info( - f"DOCUMENTS: pre: {pre_filter_docs}, post: {post_filter_docs}, loss: {docs_removed}%" - ) - - return dataset - - -source_filter = lambda ds: doc_type_searchword in ds["doc_type"] # noqa -length_filter = lambda ds: len(word_tokenize(ds["text"])) >= 10 # noqa -too_long_filter = lambda ds: len(word_tokenize(ds["text"])) > 1e5 # noqa -alpha_filter = lambda ds: alpha_ratio(ds["text"]) >= 0.7 # noqa -stop_word_filter = lambda ds: count_min_target(word_tokenize(ds["text"]), spacy_sw, 2) # noqa - -samples_pr_source: dict = defaultdict(lambda: defaultdict(int)) - - -def language_filter_with_desc_stats(ds: Dataset) -> bool: - """ - Language filtering in a streamed dataset while logging all languages - """ - s = source - language = ds["lang_fasttext"] - samples_pr_source[s][language] += 1 - - language_filter = ( - ds["lang_fasttext"] == "da" and float(ds["lang_fasttext_conf"]) >= 0.75 - ) - - return language_filter - - -def quality_checks(ds: Dataset) -> Dataset: - """ - Quality checks for: - - no duplicate ids - - no duplicate texts - - logs texts > 1e5 tokens - """ - # convert to pandas for the drop_duplicates() - df = pd.DataFrame(ds) - # remove duplicate ids - len_df = len(df) - df = df.drop_duplicates(subset=["id"]) - logger.info(f"Removed {len_df - len(df)} duplicate ids") - # remove rows with duplicate text - len_df = len(df) - df = df.drop_duplicates(subset=["text"]) - logger.info(f"Removed {len_df - len(df)} rows with duplicate text") - # reconvert and remove index - ds_f = Dataset.from_pandas(df, preserve_index=False) - try: - ds_f["__index_level_0__"] - ds_f = ds_f.remove_columns("__index_level_0__") - except KeyError: - pass - - assert len(set(ds_f["id"])) == len(ds_f), "IDs are not unique" - assert 
len(set(ds_f["text"])) == len(ds_f), "Texts are not unique" - - long_texts = ds_f.filter(too_long_filter, num_proc=None) - if len(long_texts["id"]) > 0: - logger.info(f"{len(long_texts['id'])} Long texts (>~1e5 tokens) found") - for id in long_texts["id"]: - logger.info(f"id: {id}") - else: - logger.info("No long texts (>~1e5 tokens) found") - - return ds_f - - -def main(): - # load all splits - logger.info(f"Loading data from: {hf_path}") - - danish_data = load_dataset( - hf_path, streaming=False, split="train+validation", num_proc=num_proc - ) - danish_data.cleanup_cache_files() - - # filter by metadata - logger.info(f"Processing source: {source}") - danish_data = danish_data.filter(source_filter, num_proc=num_proc) - - logger.info("Processing language") - danish_data = danish_data.filter(language_filter_with_desc_stats, num_proc=None) - - # log language changes - log_pre_filter_lang_data(samples_pr_source, danish_data) - - # convert to dynaword format - danish_data = danish_data.map(dynaword_format) - danish_data = danish_data.select_columns( - ["text", "source", "id", "added", "created", "license", "domain", "metadata"] - ) - - # filter and log changes - danish_data = filter_with_changelog(length_filter, danish_data) - danish_data = filter_with_changelog(alpha_filter, danish_data) - danish_data = filter_with_changelog(stop_word_filter, danish_data) - - # Quality checks - danish_data = quality_checks(danish_data) - - ### saving - save_path = Path(__file__).parent / f"{source}.parquet" - danish_data.to_parquet(save_path) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / f"{source}.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/ncc_parliament/descriptive_stats.json b/data/ncc_parliament/descriptive_stats.json deleted file mode 100644 index 149e83a2d6accd5e778bf963b7dfbd050ecf104d..0000000000000000000000000000000000000000 --- a/data/ncc_parliament/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 1077, - "number_of_tokens": 338871082, - "min_length_tokens": 129, - "max_length_tokens": 373587, - "number_of_characters": 980786678, - "min_length_characters": 369, - "max_length_characters": 1000000 -} \ No newline at end of file diff --git a/data/ncc_parliament/images/dist_document_length.png b/data/ncc_parliament/images/dist_document_length.png deleted file mode 100644 index a8fa49d76b88c3dd26b6e9b584bc4b498d7fdee6..0000000000000000000000000000000000000000 --- a/data/ncc_parliament/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8b87d8a58884e53b291a06bfa10130e4e490b0f56e98fc255e19a6fdb40026de -size 549692 diff --git a/data/ncc_parliament/ncc_parliament.log b/data/ncc_parliament/ncc_parliament.log deleted file mode 100644 index 71befcc8c0544513a16758030effc1e7d7ae3ea7..0000000000000000000000000000000000000000 --- a/data/ncc_parliament/ncc_parliament.log +++ /dev/null @@ -1,997 +0,0 @@ -2025-05-08 14:10:46,633 - INFO - Loading data from: NbAiLab/NCC -2025-05-08 14:10:58,217 - INFO - Processing source: ncc_parliament -2025-05-08 14:12:36,354 - INFO - Processing language -2025-05-08 14:13:17,146 - INFO - Documents of ncc_parliament: -2025-05-08 14:13:17,146 - INFO - NO: 6478, 67.9891% ; DA: 3050, 32.0109% -2025-05-08 14:13:17,146 - INFO - After language confidence filtering: -2025-05-08 14:13:17,146 - INFO - DA: 1275, 
lost: 58.1967% -2025-05-08 14:13:17,146 - INFO - Total document change: -2025-05-08 14:13:17,146 - INFO - 9528 -> 1275, loss: 86.6184% -2025-05-08 14:15:40,107 - INFO - FILTER: ['length_filter'] -2025-05-08 14:15:40,121 - INFO - TOKENS: pre: 215301994, post: 215301994, loss: 0.0% -2025-05-08 14:15:40,121 - INFO - DOCUMENTS: pre: 1275, post: 1275, loss: 0.0% -2025-05-08 14:17:43,984 - INFO - FILTER: ['alpha_filter'] -2025-05-08 14:17:43,984 - INFO - TOKENS: pre: 215301994, post: 184274856, loss: 14.41% -2025-05-08 14:17:43,984 - INFO - DOCUMENTS: pre: 1275, post: 1077, loss: 15.53% -2025-05-08 14:19:38,840 - INFO - FILTER: ['stop_word_filter'] -2025-05-08 14:19:38,840 - INFO - TOKENS: pre: 184274856, post: 184274856, loss: 0.0% -2025-05-08 14:19:38,840 - INFO - DOCUMENTS: pre: 1077, post: 1077, loss: 0.0% -2025-05-08 14:19:40,687 - INFO - Removed 0 duplicate ids -2025-05-08 14:19:41,554 - INFO - Removed 0 rows with duplicate text -2025-05-08 14:20:20,360 - INFO - 976 Long texts (>~1e5 tokens) found -2025-05-08 14:20:20,379 - INFO - id: digistorting_1906-07_part6_vol-a_part2 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1905-06_part3_vol-a_part3 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1896_part3_vol-c_part1 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1900-01_part3_vol-a_part3 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1894_part1_vol-b_part1 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1905-06_part5_vol-a_part1 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1901-02_part1_vol-c_part0 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1903-04_part3_vol-a_part0 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1897_part6_vol-a_part1 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1897_part1_vol-a_part3 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1894_part7_vol-I_part5 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1902-03_part7_vol-a_part3 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1902-03_part2_vol-a_part1 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1891_part6_vol-a_part5 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1901-02_part8_vol-a_part2 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1908_part7_vol-a_part2 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1899-00_part6_vol-a_part2 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1877_part-stid_vol-2_part2 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1902-03_part1_vol-b_part2 -2025-05-08 14:20:20,379 - INFO - id: digistorting_1892_part1_vol-a_part2 -2025-05-08 14:20:20,385 - INFO - id: digistorting_1908_part3_vol-a_part2 -2025-05-08 14:20:20,385 - INFO - id: digistorting_1893_part7_vol-I_part5 -2025-05-08 14:20:20,385 - INFO - id: digistorting_1901-02_part8_vol-a_part6 -2025-05-08 14:20:20,385 - INFO - id: digistorting_1906-07_part8_vol-a_part4 -2025-05-08 14:20:20,387 - INFO - id: digistorting_1908_part3_vol-a_part1 -2025-05-08 14:20:20,387 - INFO - id: digistorting_1872_part-stid_vol-1_part4 -2025-05-08 14:20:20,387 - INFO - id: digistorting_1908_part7_vol-b_part1 -2025-05-08 14:20:20,388 - INFO - id: digistorting_1890_part7_vol-a_part1 -2025-05-08 14:20:20,388 - INFO - id: digistorting_1900-01_part7_vol-a_part2 -2025-05-08 14:20:20,388 - INFO - id: digistorting_1893_part3_vol-b_part0 -2025-05-08 14:20:20,388 - INFO - id: digistorting_1908_part7_vol-a_part7 -2025-05-08 14:20:20,388 - INFO - id: digistorting_1893_part7_vol-I_part4 -2025-05-08 14:20:20,391 - INFO - id: digistorting_1895_part7_vol-II_part1 -2025-05-08 14:20:20,392 - INFO - id: 
-2025-05-08 14:20:20,769 - INFO - id: digistorting_1886_part-stid_vol-1_part9 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1889_part7_vol-a_part3 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1896_part5_vol-a_part0 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1908_part7_vol-a_part6 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1898-99_part8_vol-a_part4 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1889_part7_vol-a_part8 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1894_part7_vol-II_part2 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1901-02_part7_vol-a_part0 -2025-05-08 14:20:20,769 - INFO - id: digistorting_1906-07_part5_vol-a_part0 -2025-05-08 14:20:20,774 - INFO - id: digistorting_1893_part1_vol-b_part2 -2025-05-08 14:20:20,774 - INFO - id: digistorting_1874_part-stid_vol-2_part0 -2025-05-08 14:20:20,775 - INFO - id: digistorting_1894_part1_vol-b_part0 -2025-05-08 14:20:20,775 - INFO - id: digistorting_1894_part8_vol-a_part4 -2025-05-08 14:20:20,775 - INFO - id: digistorting_1904-05_part7_vol-b_part6 -2025-05-08 14:20:20,776 - INFO - id: digistorting_1903-04_part1_vol-b_part1 -2025-05-08 14:20:20,776 - INFO - id: digistorting_1901-02_part5_vol-a_part0 -2025-05-08 14:20:20,776 - INFO - id: digistorting_1896_part7_vol-II_part5 -2025-05-08 14:20:20,776 - INFO - id: digistorting_1890_part6_vol-a_part2 -2025-05-08 14:20:20,778 - INFO - id: digistorting_1887_part4_vol-a_part1 -2025-05-08 14:20:20,778 - INFO - id: digistorting_1875_part-stid_vol-2_part1 -2025-05-08 14:20:20,778 - INFO - id: digistorting_1887_part-stid_vol-1_part1 -2025-05-08 14:20:20,778 - INFO - id: digistorting_1891_part7_vol-a_part4 -2025-05-08 14:20:20,778 - INFO - id: digistorting_1888_part1_vol-b_part2 -2025-05-08 14:20:20,778 - INFO - id: digistorting_1888_part7_vol-a_part3 -2025-05-08 14:20:20,778 - INFO - id: digistorting_1902-03_part1_vol-b_part3 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1905-06_part6_vol-b_part0 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1899-00_part2_vol-a_part1 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1910_part4_vol-a_part1 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1898-99_part6_vol-a_part3 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1890_part8_vol-a_part3 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1886_part-stid_vol-2_part1 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1874_part-stid_vol-1_part3 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1868-69_part-stid_vol-2_part3 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1905-06_part2_vol-a_part2 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1875_part-stid_vol-1_part5 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1891_part1_vol-a_part0 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1908_part8_vol-a_part4 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1898-99_part7_vol-a_part3 -2025-05-08 14:20:20,781 - INFO - id: digistorting_1895_part3_vol-a_part0 -2025-05-08 14:20:20,787 - INFO - id: digistorting_1909_part6_vol-a1_part3 -2025-05-08 14:20:20,787 - INFO - id: digistorting_1889_part7_vol-a_part1 -2025-05-08 14:20:20,787 - INFO - id: digistorting_1889_part3_vol-a_part1 -2025-05-08 14:20:20,787 - INFO - id: digistorting_1895_part5_vol-a_part1 -2025-05-08 14:20:20,789 - INFO - id: digistorting_1909_part3_vol-a_part3 -2025-05-08 14:20:20,789 - INFO - id: digistorting_1895_part6_vol-a_part4 -2025-05-08 14:20:20,789 - INFO - id: digistorting_1897_part6_vol-a_part3 -2025-05-08 14:20:20,789 - INFO - id: digistorting_1888_part4_vol-a_part0 
-2025-05-08 14:20:20,789 - INFO - id: digistorting_1899-00_part1_vol-c_part1 -2025-05-08 14:20:20,789 - INFO - id: digistorting_1908_part7_vol-b_part8 -2025-05-08 14:20:20,792 - INFO - id: digistorting_1872_part-stid_vol-2_part1 -2025-05-08 14:20:20,792 - INFO - id: digistorting_1890_part1_vol-b_part3
diff --git a/data/ncc_parliament/ncc_parliament.md b/data/ncc_parliament/ncc_parliament.md deleted file mode 100644 index 0b3ac618bfaa9f39a0105cbcb3c9eff30247981d..0000000000000000000000000000000000000000 --- a/data/ncc_parliament/ncc_parliament.md +++ /dev/null @@ -1,146 +0,0 @@ ---- -pretty_name: Norwegian Colossal Corpus (parliament) -language: -- da -license: other -license_name: NLOD 2.0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Other ---- - -# Dataset Card for Norwegian Colossal Corpus (parliament) - -Collections from the Norwegian parliament in Danish. Extracted from the [Norwegian Colossal Corpus](https://huggingface.co/datasets/NbAiLab/NCC), which is derived from OCR. - -The Norwegian Colossal Corpus is a collection of multiple smaller Norwegian corpora suitable for training large language models. - -## Dataset Description - -- **Number of samples**: 1.08K -- **Number of tokens (Llama 3)**: 338.87M -- **Average document length in tokens (min, max)**: 314.64K (129, 373.59K) - -## Dataset Structure -An example from the dataset looks as follows. - -```py -{ - "id": "digistorting_1906-07_part6_vol-a_part2", - "text": "Liknes —Aaroot i foranstaaende oversigt over omkostningerne er beregnet til kr. 37 500,00 under foru[...]", - "source": "ncc_parliament", - "added": "2025-05-08", - "created": "2021-01-01, 2021-12-31", - "token_count": 360308 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer - -### Dataset Statistics -[plot: distribution of document length]
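The `token_count` field above can be recomputed along the following lines. This is a minimal sketch, assuming access to the gated `meta-llama/Meta-Llama-3-8B` tokenizer on Hugging Face; the exact tokenizer settings used for the published counts may differ.

```py
# Sketch: recompute a token_count with the Llama 3 8B tokenizer (assumption:
# `transformers` is installed and access to the gated model has been granted).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B")

def count_tokens(text: str) -> int:
    # Exclude special tokens (BOS/EOS) so only the document itself is counted.
    return len(tokenizer(text, add_special_tokens=False)["input_ids"])

print(count_tokens("Liknes i foranstaaende oversigt over omkostningerne ..."))
```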
- - -## Additional Information - -## License Information - -This dataset is licensed under [NLOD 2.0](https://data.norge.no/nlod/en/2.0). -This license is derived from the original [publication](https://huggingface.co/datasets/NbAiLab/NCC), which is published by the -[National Library of Norway](https://www.nb.no/en/). - -## Filtering - -This subset is the result of the following filtering from all available data splits on the [NCC](https://huggingface.co/datasets/NbAiLab/NCC): - -- is_parliament: Documents that are tagged as parliament data -- language_filter: Document is classified as Danish with a threshold of 0.75 -- min_length: Document has at least 10 words (whitespace-separated strings + punctuation) -- alpha_ratio: The ratio of words consisting only of alphabetical characters to all words is at least 0.7 -- min_stop_words: The document contains at least 2 Danish stop words -- duplicate: Duplicate documents were removed - -The effect of each of these steps is outlined in the table below:
-| Filtering step  | Number of documents remaining |
-| --------------- | ----------------------------- |
-| is_parliament   | 9 528  |
-| language_filter | 1 275  |
-| min_length      | 1 275  |
-| alpha_ratio     | 1 077  |
-| min_stop_words  | 1 077  |
-| duplicate       | 1 077  |
-Note that a total of 976 long texts (longer than ~1e5 tokens) were found. - -### Quality - -It is important to note that recurring [OCR](https://en.wikipedia.org/wiki/Optical_character_recognition) errors and historic expressions in older -texts hinder the legibility of some of the documents and make differentiating between Norwegian and Danish difficult. - -### Citation Information - -If you use this source, please cite the following articles: - -``` -@inproceedings{kummervold-etal-2022-norwegian-colossal, - title = {The {N}orwegian colossal corpus: A text corpus for training large {N}orwegian language models}, - author = {Kummervold, Per E and - Wetjen, Freddy and - De la Rosa, Javier}, - booktitle = {Proceedings of the Thirteenth Language Resources and Evaluation Conference (LREC)}, - year = {2022}, - address = {Marseille, France}, - publisher = {European Language Resources Association}, - url = {https://aclanthology.org/2022.lrec-1.410}, - pages = {3852--3860}, - abstract = {Norwegian has been one of many languages lacking sufficient available text to train quality language models. In an attempt to bridge this gap, we introduce the Norwegian Colossal Corpus (NCC), which comprises 49GB of clean Norwegian textual data containing over 7B words. The NCC is composed of different and varied sources, ranging from books and newspapers to government documents and public reports, showcasing the various uses of the Norwegian language in society. The corpus contains mainly Norwegian Bokmål and Norwegian Nynorsk. Each document in the corpus is tagged with metadata that enables the creation of sub-corpora for specific needs. Its structure makes it easy to combine with large web archives that for licensing reasons could not be distributed together with the NCC.
By releasing this corpus openly to the public, we hope to foster the creation of both better Norwegian language models and multilingual language models with support for Norwegian.}, -} - -@inproceedings{kummervold-etal-2021-operationalizing, - title = {Operationalizing a National Digital Library: The Case for a {N}orwegian Transformer Model}, - author = {Kummervold, Per E and - De la Rosa, Javier and - Wetjen, Freddy and - Brygfjeld, Svein Arne}, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa)}, - year = {2021}, - address = {Reykjavik, Iceland (Online)}, - publisher = {Linköping University Electronic Press, Sweden}, - url = {https://aclanthology.org/2021.nodalida-main.3}, - pages = {20--29}, - abstract = {In this work, we show the process of building a large-scale training set from digital and digitized collections at a national library. - The resulting Bidirectional Encoder Representations from Transformers (BERT)-based language model for Norwegian outperforms multilingual BERT (mBERT) models - in several token and sequence classification tasks for both Norwegian Bokmål and Norwegian Nynorsk. Our model also improves the mBERT performance for other - languages present in the corpus such as English, Swedish, and Danish. For languages not included in the corpus, the weights degrade moderately while keeping strong multilingual properties. Therefore, - we show that building high-quality models within a memory institution using somewhat noisy optical character recognition (OCR) content is feasible, and we hope to pave the way for other memory institutions to follow.}, -} - -```
diff --git a/data/ncc_parliament/ncc_parliament.parquet b/data/ncc_parliament/ncc_parliament.parquet deleted file mode 100644 index 2fe22e8494f94de60b58570b72e9951e24aa5207..0000000000000000000000000000000000000000 --- a/data/ncc_parliament/ncc_parliament.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:41256bd4001d87fc05c81a4757b5203c01cb7bb9b7ea7d7d3ac89c3c5c393951 -size 541074238
diff --git a/data/nordjyllandnews/create.py b/data/nordjyllandnews/create.py deleted file mode 100644 index 72b467092c00ac23119b481152fc5699b6288c20..0000000000000000000000000000000000000000 --- a/data/nordjyllandnews/create.py +++ /dev/null @@ -1,51 +0,0 @@ -""" -This script downloads Nordjylland News and converts it to the format of Danish Dynaword -""" - -import random -from pathlib import Path -from typing import cast - -from datasets import Dataset, load_dataset - -schemas = [ - "{summary}\n\n{text}", - "{text}\n\nOpsummering:\n{summary}", - "{text}\n\nReferat:\n{summary}", - "Lav et referat af nedenstående tekst:\n\nTekst:\n{text}\n\nReferat:\n{summary}", -] -source = "nordjyllandnews" - - -def convert_sample(example): - schema = random.choice(schemas) - new_example = dict( - text_new=schema.format(text=example["text"], summary=example["summary"]), - source=source, - domain="News", - license="Creative Commons Legal Code\n\nCC0 1.0 Universal", - added="2024-12-16", - created="2000-01-01, 2024-01-01", # best guess - metadata={"source-pretty": "Nordjylland News"}, - ) - - return new_example - - -def main(): - ds = load_dataset("alexandrainst/nordjylland-news-summarization", split="train") - ds = cast(Dataset, ds) - - ds = ds.map(convert_sample, remove_columns=ds.column_names) - ds = ds.rename_columns({"text_new": "text"}) - ds = ds.add_column("id", [f"{source}_{i}" for i in range(len(ds))]) # type: ignore - ds = ds.select_columns( - ["text",
"source", "id", "added", "created", "license", "domain", "metadata"] - ) - - save_path = Path(__file__).parent / f"{source}.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/nordjyllandnews/descriptive_stats.json b/data/nordjyllandnews/descriptive_stats.json deleted file mode 100644 index 91f856f4a32f8e021fd8434f356542a99649d991..0000000000000000000000000000000000000000 --- a/data/nordjyllandnews/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 75215, - "number_of_tokens": 37904580, - "min_length_tokens": 29, - "max_length_tokens": 12257, - "number_of_characters": 115853123, - "min_length_characters": 96, - "max_length_characters": 35269 -} \ No newline at end of file diff --git a/data/nordjyllandnews/images/dist_document_length.png b/data/nordjyllandnews/images/dist_document_length.png deleted file mode 100644 index 49dca13542ed8daf456018f33e1f52458497dcda..0000000000000000000000000000000000000000 --- a/data/nordjyllandnews/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cb948d112b1d53092ec8aee6ced6c6f7d4769ac22af91fcaf297e86e765580ba -size 561793 diff --git a/data/nordjyllandnews/nordjyllandnews.md b/data/nordjyllandnews/nordjyllandnews.md deleted file mode 100644 index 837ef3805d547e113f327cc77076cd4bc2311d71..0000000000000000000000000000000000000000 --- a/data/nordjyllandnews/nordjyllandnews.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -pretty_name: Nordjylland News -language: -- da -license: cc0-1.0 -license_name: CC-0 -size_categories: -- 10-100k -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -source_datasets: -- alexandrainst/nordjylland-news-summarization -domains: -- News ---- - -# Dataset Card for Nordjylland News - - -Articles from the Danish Newspaper [TV2 Nord](https://www.tv2nord.dk). - - - -The data is derived from the Huggingface dataset [alexandrainst/nordjylland-news-summarization](https://huggingface.co/datasets/alexandrainst/nordjylland-news-summarization) originally intended for text summarization. - -## Dataset Description - - - -- **Number of samples**: 75.22K -- **Number of tokens (Llama 3)**: 37.90M -- **Average document length in tokens (min, max)**: 503.9497440670079 (29, 12.26K) - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "nordjyllandnews_0", - "text": "Lav et referat af nedenstående tekst:\n\nTekst:\nOpdatering: Manden er nu fundet af Nordjyllands Politi[...]", - "source": "nordjyllandnews", - "added": "2024-12-16", - "created": "2000-01-01, 2024-01-01", - "token_count": 628 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - - -### Dataset Statistics - - -

- -

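The `create.py` shown above wraps each article in one of four randomly chosen summarization templates. A small, self-contained illustration of that formatting step (the example article below is made up):

```py
# Illustration of the random schema formatting used in create.py above.
import random

schemas = [
    "{summary}\n\n{text}",
    "{text}\n\nOpsummering:\n{summary}",
    "{text}\n\nReferat:\n{summary}",
    "Lav et referat af nedenstående tekst:\n\nTekst:\n{text}\n\nReferat:\n{summary}",
]

example = {"text": "Byrådet vedtog onsdag næste års budget.", "summary": "Budgettet er vedtaget."}
random.seed(0)  # fixed seed so the illustration is reproducible
print(random.choice(schemas).format(**example))
```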
- - - - -## Additional Information - - -## Opportunities for Improvement - -An updated version of this data could be fetched from their [API](https://developer.bazo.dk/#876ab6f9-e057-43e3-897a-1563de34397e). - -### Source Data -This dataset is derived from [`alexandrainst/nordjylland-news-summarization`](https://huggingface.co/datasets/alexandrainst/nordjylland-news-summarization). - -### Citation Information - -No citation is applicable for this work. We recommend citing the huggingface repository.
diff --git a/data/nordjyllandnews/nordjyllandnews.parquet b/data/nordjyllandnews/nordjyllandnews.parquet deleted file mode 100644 index 80d588d80276f533e29c4b593373a00e3456eeb2..0000000000000000000000000000000000000000 --- a/data/nordjyllandnews/nordjyllandnews.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8a55787f77e8713ddc0a1d51255f06cc9959ffb1f929538142519b3d8c3509ec -size 71432563
diff --git a/data/nota/create.py b/data/nota/create.py deleted file mode 100644 index 671aa9b057c4885aaa0637e3249dd6c4bf0eb055..0000000000000000000000000000000000000000 --- a/data/nota/create.py +++ /dev/null @@ -1,77 +0,0 @@ -from collections import defaultdict -from io import BytesIO -from pathlib import Path -import requests -from zipfile import ZipFile - -import pandas as pd - -URL = "https://sprogtek-ressources.digst.govcloud.dk/nota/Nota-txt_only.zip" - -column_order = [ - "text", - "source", - "id", - "added", - "created", - "license", - "domain", - "metadata", -] - - -def convert_sample(id: str, text: str) -> dict: - year = id[4:8] - new_example = dict( - text=text, - id=id.split("_")[0], - source="nota", - domain="Readaloud", - license="Creative Commons Legal Code\n\nCC0 1.0 Universal", - added="2025-02-03", - created=f"{year}-01-01, {year}-12-31", # year parsed from the document id - metadata={"source-pretty": "Nota lyd- og tekstdata"}, - ) - - return new_example - - -def download_and_process_zip(url): - response = requests.get(url) - response.raise_for_status() # Ensure we got a valid response - - with ZipFile(BytesIO(response.content), "r") as z: - file_groups = defaultdict(list) - - # Read all text files from the ZIP - for file_name in z.namelist(): - if file_name.endswith(".txt"): # Process only text files - prefix = file_name.split("/")[1].split("_")[0] - with z.open(file_name) as f: - file_groups[prefix].append(f.read().decode("utf-8")) - - # Combine files with the same prefix - combined_files = { - f"{prefix}_combined.txt": "\n".join(contents) - for prefix, contents in file_groups.items() - } - - return combined_files # Dictionary with combined file names and contents - - -def main(): - combined_results = download_and_process_zip(URL) - dataset = [] - for filename, content in combined_results.items(): - sample = convert_sample(filename, content) - dataset.append(sample) - - df = pd.DataFrame(dataset) - df = df.drop_duplicates(keep="first", subset=["text"]) - - save_path = Path(__file__).parent / "nota.parquet" - df.to_parquet(save_path) - - -if __name__ == "__main__": - main()
diff --git a/data/nota/descriptive_stats.json b/data/nota/descriptive_stats.json deleted file mode 100644 index 63370f780f78658ecaba8a5bd9cf31e4839a77a7..0000000000000000000000000000000000000000 --- a/data/nota/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 446, - "number_of_tokens": 7299012, - "min_length_tokens": 4477, - "max_length_tokens": 107261, - "number_of_characters": 17371602, - "min_length_characters": 10134, - "max_length_characters": 314671 -} \ No
newline at end of file
diff --git a/data/nota/images/dist_document_length.png b/data/nota/images/dist_document_length.png deleted file mode 100644 index 4f8de104fe75a5cc3c9c16092b92c6157890262f..0000000000000000000000000000000000000000 --- a/data/nota/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e52af3201b13b825141138b44e5360037b932e9d4d3a7b4424f782f05834cf52 -size 542600
diff --git a/data/nota/nota.md b/data/nota/nota.md deleted file mode 100644 index d54b763eb95b5fc7ee7ac9f00751fbacca69451c..0000000000000000000000000000000000000000 --- a/data/nota/nota.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -pretty_name: Nota lyd- og tekstdata (Tekst only) -language: -- da -license: cc0-1.0 -license_name: CC-0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -source_datasets: -- https://sprogteknologi.dk/dataset/nota-lyd-og-tekstdata -domains: -- Readaloud -- Spoken ---- - -# Dataset Card for Nota lyd- og tekstdata (Tekst only) - -The text-only part of the [Nota lyd- og tekstdata](https://sprogteknologi.dk/dataset/nota-lyd-og-tekstdata) dataset. - -## Dataset Description - -- **Number of samples**: 446 -- **Number of tokens (Llama 3)**: 7.30M -- **Average document length in tokens (min, max)**: 16.37K (4.48K, 107.26K) - -## Dataset Structure
-An example from the dataset looks as follows. - -```py -{ - "id": "INSL20160004", - "text": "Inspiration nr. 4, 2016\nBiblioteksbetjening \nTelefon: 39 13 46 00\nEmail: biblioteket@nota.dk\nInspira[...]", - "source": "nota", - "added": "2025-02-03", - "created": "2016-01-01, 2016-12-31", - "token_count": 69977 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer - -### Additional Processing - -### Dataset Statistics
-[plot: distribution of document length]
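In the `create.py` for this dataset (shown above), the text files from the ZIP are grouped by the prefix before the first underscore in the file name and concatenated into one document per prefix. A toy run of that grouping logic, using made-up file names:

```py
# Toy illustration of the prefix grouping in create.py above; file names are hypothetical.
from collections import defaultdict

names = ["Nota-txt_only/INSL20160004_a.txt", "Nota-txt_only/INSL20160004_b.txt", "Nota-txt_only/TB20180001_a.txt"]
file_groups: dict[str, list[str]] = defaultdict(list)
for file_name in names:
    prefix = file_name.split("/")[1].split("_")[0]  # e.g. "INSL20160004"
    file_groups[prefix].append(f"<contents of {file_name}>")

combined_files = {f"{prefix}_combined.txt": "\n".join(contents) for prefix, contents in file_groups.items()}
print(sorted(combined_files))  # ['INSL20160004_combined.txt', 'TB20180001_combined.txt']
```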
- - - -## Additional Information - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -```
diff --git a/data/nota/nota.parquet b/data/nota/nota.parquet deleted file mode 100644 index 491c27eced3ef5c17d73a51496f9c013324a4381..0000000000000000000000000000000000000000 --- a/data/nota/nota.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c86840b878a3c0bc9bcad13d4766bcb1c10f69cde7feb5ae5852c7421cba3331 -size 7904509
diff --git a/data/opensubtitles/create.py b/data/opensubtitles/create.py deleted file mode 100644 index ba3721eb2ab0854015cb7cd92aaf4583b113718e..0000000000000000000000000000000000000000 --- a/data/opensubtitles/create.py +++ /dev/null @@ -1,131 +0,0 @@ -# /// script -# requires-python = "==3.12" -# dependencies = [ -# "datasets==3.2.0", -# "spacy==3.8.3", -# ] -# /// - -from pathlib import Path -from typing import cast - -import pandas as pd -import spacy -from datasets import Dataset, load_dataset - -# KCE: mail from Leon -sample_to_redact = { - # Der kommer en dag - "opensub_6726481", - "opensub_6732371", - # Kollektivet - "opensub_6645818", - # Flaskepost fra P - "opensub_6666922", - "opensub_6720216", - "opensub_6958711", - # Fasandræberne - "opensub_6036947", - "opensub_6008622", - # En du elsker - "opensub_5828376", - "opensub_5828378", - # En chance til - "opensub_6177523", - # Lev stærkt - "opensub_6467655", - # Nymphomaniac - "opensub_5604391", - "opensub_5748340", - "opensub_5748494", - "opensub_5629516", - # Kvinden i buret - "opensub_5636248", - "opensub_5514603", - "opensub_5504932", - # Den skaldede frisør - "opensub_5084880", - "opensub_5031826", - # Jagten - "opensub_6929419", - "opensub_4885548", - # Melancholia - "opensub_4421330", - "opensub_4406991", - "opensub_4418817", - # Ambassadøren - "opensub_4557721", - # Antichrist - "opensub_5511502", - "opensub_3938655", - "opensub_3636940", - "opensub_3564521", - "opensub_3562215", - # En kongelig affære - "opensub_4725493", - "opensub_4725160", - "opensub_4725159", - "opensub_4916871", - "opensub_5186746", - # Brødre - "opensub_233943", - "opensub_87475", -} - -column_order = [ - "text", - "source", - "id", - "added", - "created", - "license", - "domain", - "metadata", -] - - -def convert_sample(example: dict) -> dict: - text = example["text"] - if example["doc_id"] in sample_to_redact: - nlp = spacy.blank("da") - doc = nlp(text) - text = doc[:200].text # first 200 tokens - - new_example = dict( - text_new=text, - id=example["doc_id"], - source="opensubtitles", - domain="Conversation", - license="Creative Commons Legal Code\n\nCC0 1.0 Universal", - added="2025-01-02", -
created="1920-01-01, 2018-01-01", # assuming v2018 - metadata={"source-pretty": "OpenSubtitles"}, - ) - - return new_example - - -def main(): - ds = load_dataset("DDSC/partial-danish-gigaword-no-twitter", split="train") - ds = cast(Dataset, ds) - ds = ds.filter(lambda x: x["source"] == "opensub", num_proc=4) - ds = ds.map(convert_sample, num_proc=4) - ds = ds.select_columns(column_order[1:] + ["text_new"]) - ds = ds.rename_columns({"text_new": "text"}) - # ensure order - ds = ds.select_columns(column_order) - - df = ds.to_pandas() - df = cast(pd.DataFrame, df) - dedup_df = df.drop_duplicates(keep="first", subset=["text"]) - print("N. duplicates: ", df.shape[0] - dedup_df.shape[0]) # 2422 - - ds = ds.select(dedup_df.index) - assert len(set(ds["text"])) == len(ds) - - save_path = Path(__file__).parent / "opensubtitles.parquet" - ds.to_parquet(save_path) - - -if __name__ == "__main__": - main() diff --git a/data/opensubtitles/descriptive_stats.json b/data/opensubtitles/descriptive_stats.json deleted file mode 100644 index 153571c2a4b396efdf151f2ca04dddad2c9653b4..0000000000000000000000000000000000000000 --- a/data/opensubtitles/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 29820, - "number_of_tokens": 271599443, - "min_length_tokens": 45, - "max_length_tokens": 70137, - "number_of_characters": 784206884, - "min_length_characters": 95, - "max_length_characters": 196114 -} \ No newline at end of file diff --git a/data/opensubtitles/images/dist_document_length.png b/data/opensubtitles/images/dist_document_length.png deleted file mode 100644 index c8812871edb95442a7d10cee87fc5177aac05334..0000000000000000000000000000000000000000 --- a/data/opensubtitles/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:163dca90fbe2598242e6bdb8646face945d7f3dbcccdaa38da5686f14ff8863d -size 547696 diff --git a/data/opensubtitles/opensubtitles.md b/data/opensubtitles/opensubtitles.md deleted file mode 100644 index 121d6233b083751c7589de8f996a956911f0ccd9..0000000000000000000000000000000000000000 --- a/data/opensubtitles/opensubtitles.md +++ /dev/null @@ -1,156 +0,0 @@ ---- -pretty_name: OpenSubtitles -language: -- da -license: cc0-1.0 -license_name: CC-0 -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -source_datasets: -- DDSC/partial-danish-gigaword-no-twitter -domains: -- Conversation -- Subtitles -- Spoken ---- - -# Dataset Card for OpenSubtitles - - -Danish subsection of [OpenSubtitles](https://opus.nlpl.eu/OpenSubtitles/corpus/version/OpenSubtitles). - - - -## Dataset Description - - -- **Number of samples**: 29.82K -- **Number of tokens (Llama 3)**: 271.60M -- **Average document length in tokens (min, max)**: 9.11K (45, 70.14K) - - - -## Dataset Structure -An example from the dataset looks as follows. - - -```py -{ - "id": "opensub_6822913", - "text": "Tidligere i vikingerne...\nJeg skal gå tilbage til England.\nBurde være gået tilbage for lang tid side[...]", - "source": "opensubtitles", - "added": "2025-01-02", - "created": "1920-01-01, 2018-01-01", - "token_count": 3559 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. 
-- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - - -### Additional Processing - -Due to copyright concern additional documents have been removed due to copyright concerns. These include: - -```py -{ - # Der kommer en dag - "opensub_6726481", - "opensub_6732371", - # Kollektivet - "opensub_6645818", - # Flaskepost fra P - "opensub_6666922", - "opensub_6720216", - "opensub_6958711", - # Fasandræberne - "opensub_6036947", - "opensub_6008622", - # En du elsker - "opensub_5828376", - "opensub_5828378", - # En chance til - "opensub_6177523", - # Lev stærkt - "opensub_6467655", - # Nymphomaniac - "opensub_5604391", - "opensub_5748340", - "opensub_5748494", - "opensub_5629516", - # Kvinden i buret - "opensub_5636248", - "opensub_5514603", - "opensub_5504932", - # Den skaldede frisør - "opensub_5084880", - "opensub_5031826", - # Jagten - "opensub_6929419", - "opensub_4885548", - # Melancholia - "opensub_4421330", - "opensub_4406991", - "opensub_4418817", - # Ambassadøren - "opensub_4557721", - # Antichrist - "opensub_5511502", - "opensub_3938655", - "opensub_3636940", - "opensub_3564521", - "opensub_3562215", - # En kongelig affære - "opensub_4725493", - "opensub_4725160", - "opensub_4725159", - "opensub_4916871", - "opensub_5186746", - # Brødre - "opensub_233943", - "opensub_87475", -} -``` - -We have additionally removed duplicate entries from the original dataset. - -### Dataset Statistics - - -

- -

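As documented above, the flagged titles are not dropped entirely: `create.py` truncates them to their first 200 tokens using a blank Danish spaCy pipeline. A minimal sketch of that step (note that slicing a spaCy `Doc` selects tokens, not characters):

```py
# Minimal sketch of the redaction step from create.py above.
import spacy

nlp = spacy.blank("da")  # tokenizer-only Danish pipeline, no trained components
doc = nlp("Tidligere i vikingerne... Jeg skal gå tilbage til England.")
redacted = doc[:200].text  # first 200 tokens; shorter texts are returned unchanged
print(redacted)
```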
- - - -## Additional Information - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -```
diff --git a/data/opensubtitles/opensubtitles.parquet b/data/opensubtitles/opensubtitles.parquet deleted file mode 100644 index 1d5a421a2701d4d216a6809ad21105914ec59975..0000000000000000000000000000000000000000 --- a/data/opensubtitles/opensubtitles.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:14ee10c6512a052c404190a7494417e69defcad8f9ec756b267f3ad549dd932c -size 496057498
diff --git a/data/relig/descriptive_stats.json b/data/relig/descriptive_stats.json deleted file mode 100644 index 1640b5a77eabb078ecca08066aed943d557b13a4..0000000000000000000000000000000000000000 --- a/data/relig/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 66, - "number_of_tokens": 1243970, - "min_length_tokens": 473, - "max_length_tokens": 66422, - "number_of_characters": 3555655, - "min_length_characters": 1403, - "max_length_characters": 183661 -} \ No newline at end of file
diff --git a/data/relig/images/dist_document_length.png b/data/relig/images/dist_document_length.png deleted file mode 100644 index f2dcd2e06c0d5cce4bbe1bfd86602a230c0c78cd..0000000000000000000000000000000000000000 --- a/data/relig/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:af8d9068ee76992e89579ffa1cb7f75d35d9a7e6e7c4ab3aa932edaf15763c75 -size 531321
diff --git a/data/relig/relig.md b/data/relig/relig.md index 7a08f4e008c746b068b6feb874b99c3a6917eb79..c40f0068eb64963dfd0835c52ab8dd0c335a5b4a 100644 --- a/data/relig/relig.md +++ b/data/relig/relig.md @@ -1,95 +1,56 @@ --- pretty_name: Religious texts language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Books + - language-modeling --- - # Dataset Card for Religious texts - - -Danish religious texts from 1700-2022. - - -This section contains a Danish translation of the Bible from the Massively Parallel Bible corpus (Christodouloupoulos and Steedman, 2015) -without any pre-processing other than file format conversion.
- - -## Dataset Description - - - -- **Number of samples**: 66 -- **Number of tokens (Llama 3)**: 1.24M -- **Average document length in tokens (min, max)**: 18.85K (473, 66.42K) - - - - -## Dataset Structure +- **Number of records:** 66 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "relig_SON", - "text": "Salomos Højsang\nKys mig, giv mig Kys af din mund thi din Kærlighed er bedre end Vin.\nLifligt dufter [...]", - "source": "relig", - "added": "2020-09-14", - "created": "1700-01-01, 2022-01-01", - "token_count": 4099 + 'text': 'Salomos Højsang +Kys mig, giv mig Kys af din mund t', + 'source': 'relig', + 'id': 'relig_SON', + 'added': '2020-09-14', + 'created': '1700-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Wiki & Books', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'Religious texts' + } } ``` -### Data Fields +## Data Fields -An entry in the dataset consists of the following fields: +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp when ai2 acquired this data. +- **created**: timestamp when the original document was created (best guess if not available) +- **metadata**: source-specific metadata. -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer - +## License Information +Creative Commons Zero v1.0 Universal +Creative Commons Legal Code +CC0 1.0 Universal
-### Dataset Statistics
-[plot: distribution of document length]
- - - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` +
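Once the repository is cloned and the LFS objects are pulled, a subset's parquet shard can be inspected directly. A quick, hypothetical sanity check with pandas (assumes `pandas` and `pyarrow` are installed):

```py
# Hypothetical inspection of the relig shard; the file must first be
# materialized via `git lfs pull`.
import pandas as pd

df = pd.read_parquet("data/relig/relig.parquet")
print(df.columns.tolist())  # expected to include: id, text, source, added, created
print(df.loc[0, "id"])      # e.g. 'relig_SON'
```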
diff --git a/data/relig/relig.parquet b/data/relig/relig.parquet index 086bfeae77ec03189aa881308735be9de3d69115..5a7fb1dfd7df3b1bc0ed0e63e5fe18552106cfed 100644 --- a/data/relig/relig.parquet +++ b/data/relig/relig.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:4a7a67dd602c035b31eaf403d8d017398ceb81d38b232649d3b9381a0b35d5c7 -size 2000386 +oid sha256:ba59db9efa6756fd6306380c39e9f25b50c99ddb6b7c0c2391e417d95d0af6da +size 2003050
diff --git a/data/retsinformationdk/create.py b/data/retsinformationdk/create.py deleted file mode 100644 index 846bea7615a1d2898f6982040473372c082ce082..0000000000000000000000000000000000000000 --- a/data/retsinformationdk/create.py +++ /dev/null @@ -1,166 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets==3.2.0", -# "pandas", -# "requests", -# "trafilatura", -# "dynaword" -# ] -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "00e7f2aee7f7ad2da423419f77ecbb9c0536de0d" } -# /// - -from datetime import date, datetime -from io import StringIO -import logging -from pathlib import Path -import pandas as pd -import requests -from requests.adapters import HTTPAdapter -from urllib3 import Retry -from trafilatura import extract -from datasets import Dataset -from tqdm import tqdm - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -TMP_DIR = Path(__file__).parent / "tmp" - -BASE_URL = "https://www.retsinformation.dk/api/document/eli" - -logger = logging.getLogger(__name__) -today = date.today() - - -def create_session_with_retries(retries=2, backoff_factor=0.5): - session = requests.Session() - retry_strategy = Retry( - total=retries, - backoff_factor=backoff_factor, - status_forcelist=[500, 502, 503, 504], - allowed_methods=["GET"], - respect_retry_after_header=True, - ) - adapter = HTTPAdapter(max_retries=retry_strategy) - session.mount("http://", adapter) - session.mount("https://", adapter) - return session - - -def fetch_document_list(): - download = True - csv_content = "" - - df: pd.DataFrame = pd.DataFrame() - - if TMP_DIR.exists(): - files = list(TMP_DIR.glob("*.csv")) - if files: # guard against an empty cache directory - file = sorted(files, reverse=True)[0] - - file_date = datetime.strptime(file.stem, "%Y-%m-%d").date() - - if (today - file_date).days < 180: - download = False - df = pd.read_csv(file) - - if download: - logger.info("Downloading list of files from Retsinformation.dk") - response = requests.get( - "https://www.retsinformation.dk/api/documentsearch/csv?dt=10&dt=1480&dt=20&dt=30&dt=40&dt=50&dt=90&dt=120&dt=270&dt=60&dt=100&dt=80&dt=110&dt=130&dt=140&dt=150&dt=160&dt=170&dt=180&dt=200&dt=210&dt=220&dt=1510&dt=1490&dt=-10&dt=230&dt=240&dt=250&dt=260&dt=980&dt=360&dt=400&dt=380&dt=420&dt=1530&dt=440&dt=450&dt=430&dt=1540&dt=460&dt=410&dt=370&dt=480&dt=390&dt=500&dt=510&dt=520&dt=490&dt=300&dt=310&dt=320&dt=330&dt=340&dt=350&o=40" - ) - response.raise_for_status() # Raise error for bad responses - - # The response body is a CSV in UTF-16 text - csv_content = response.content.decode("utf-16", errors="replace") - logger.info("Downloaded list of documents") - - # Parse with pandas - df = pd.read_csv(StringIO(csv_content), sep=";") # Assuming semicolon separator - - TMP_DIR.mkdir(parents=True, exist_ok=True) # ensure the cache dir exists before writing - df.to_csv(TMP_DIR / (today.strftime("%Y-%m-%d") + ".csv"), index=False) - - return df[ - [ - "DokumentType", - "DokumentId", - "Titel", - "Ressort", -
"Historisk", - "PubliceretTidspunkt", - "EliUrl", - ] - ] - - -def fetch_document(doc_info: pd.Series, session: requests.Session) -> dict: - url = BASE_URL + doc_info["EliUrl"].strip().split("eli")[1] - - response = session.post( - url, - headers={ - "Accept": "application/json", - "Content-Type": "application/json", - }, - json={}, - ) - response.raise_for_status() - - return response.json()[0] - - -def main(): - save_path = Path(__file__).parent / "retsinformationdk.parquet" - documents = fetch_document_list() - - logger.info(f"Found {len(documents)} documents from retsinformationdk") - - session = create_session_with_retries() - docs = [] - for idx, doc_info in tqdm(documents.iterrows(), total=len(documents)): - if doc_info["Historisk"]: - continue - try: - doc = fetch_document(doc_info, session) - text = extract(doc["documentHtml"], output_format="markdown") - docs.append( - { - "id": doc_info["DokumentId"], - "text": text if text else "", - "source": "retsinformationdk", - "added": today.strftime("%Y-%m-%d"), - "created": f"{date.fromisoformat(str(doc_info['PubliceretTidspunkt'])).strftime('%Y-%m-%d')}, {date.fromisoformat(str(doc_info['PubliceretTidspunkt'])).strftime('%Y-%m-%d')}", - } - ) - except Exception as e: - logger.error(f"Ran in to error: {e}") - logger.error(f"Skipping doc {doc_info['DokumentId']}") - - ds = Dataset.from_list(docs) - - # quality checks and processing - ds = remove_empty_texts(ds) - ds = remove_duplicate_text(ds) - ds = add_token_count(ds) - ds = ensure_column_order(ds) - - ds.to_parquet(save_path) - - -if __name__ == "__main__": - log_path = Path(__file__).parent / "retsinformationdk.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/retsinformationdk/descriptive_stats.json b/data/retsinformationdk/descriptive_stats.json deleted file mode 100644 index a60ccc7d573bed58bb98327f499635c7e2a38426..0000000000000000000000000000000000000000 --- a/data/retsinformationdk/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 100524, - "number_of_tokens": 818252220, - "min_length_tokens": 34, - "max_length_tokens": 9593591, - "number_of_characters": 2338693895, - "min_length_characters": 103, - "max_length_characters": 21609422 -} \ No newline at end of file diff --git a/data/retsinformationdk/images/dist_document_length.png b/data/retsinformationdk/images/dist_document_length.png deleted file mode 100644 index fce05c0b2c8c656f7742c5222e475d8c9b41eb14..0000000000000000000000000000000000000000 --- a/data/retsinformationdk/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0c7be68f9042207eae602d768b6cb499a6322e8681e925271df205ba27865a2a -size 583872 diff --git a/data/retsinformationdk/retsinformationdk.md b/data/retsinformationdk/retsinformationdk.md index 5f6c03eb291811f379b9490a10017314ac11c951..0be754094f5bbae7794e1ed4d643d7180a5a2956 100644 --- a/data/retsinformationdk/retsinformationdk.md +++ b/data/retsinformationdk/retsinformationdk.md @@ -1,76 +1,51 @@ --- pretty_name: retsinformation.dk (Danish legal information) language: -- da + - da license: other license_name: Danish Copyright Law size_categories: -- 10k-100k + - 10k-100k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- 
Legal + - language-modeling --- - # Dataset Card for retsinformation.dk (Danish legal information) - - -[retsinformation.dk](https://www.retsinformation.dk) (legal-information.dk) is the official legal information system of Denmark. - - - -It serves as a central repository for Danish legislation, administrative regulations, and other legally binding documents. The platform ensures transparency and public access to laws and legal materials. The site includes: - -1. **Legislation**: Danish laws, acts, and statutes passed by the Parliament (Folketinget). -2. **Administrative Regulations**: Rules, guidelines, and executive orders issued by governmental authorities. -3. **Historical Versions**: Archived versions of laws and regulations, useful for legal research or historical reference. -4. **Preparatory Works** (Forarbejder): Documents explaining the background and intent behind legislative acts, such as proposals and committee reports. -5. **Case Law References**: Links to decisions and interpretations that relate to specific legislation. - - ## Dataset Description - - - -- **Number of samples**: 100.52K -- **Number of tokens (Llama 3)**: 818.25M -- **Average document length in tokens (min, max)**: 8.14K (34, 9.59M) - - - -## Dataset Structure +## Dataset Description +- **Number of records:** 64043 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. +```yaml { 'text': 'Den fulde tekst Pressenævnets kendelse i sag nr. 1', 'source': 'retsinformationdk', 'id': 'retsinformationdk_173889', 'added': '2019-11-22', 'created': '2000-01-01, 2022-01-01', 'metadata': { 'domain': 'Legal', 'license': 'Danish Copyright law at https://www.retsinformation.dk/forms/r0710.aspx?id=164796 states § 9. Love, administrative forskrifter, retsafgørelser og lignende offentlige aktstykker er ikke genstand for ophavsret. - -```py -{ - "id": "AA014851", - "text": "Indsamlingsnævnets afgørelse i sag nr. 22-730-00015\n\nIndsamlingsnævnet fandt det kritisabelt, at Gad[...]", - "source": "retsinformationdk", - "added": "2025-06-26", - "created": "2025-06-25, 2025-06-25", - "token_count": 4062 +Stk. 2. Bestemmelsen i stk. 1 gælder ikke for værker, der fremtræder som selvstændige bidrag i de i stk. 1 nævnte aktstykker. Sådanne værker må dog gengives i forbindelse med aktstykket. Retten til videre udnyttelse afhænger af de i øvrigt gældende regler. +', + 'source-pretty': 'retsinformation.dk (Danish legal information)' + } } ``` -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer - +## Data Fields +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp when ai2 acquired this data. +- **created**: timestamp when the original document was created (best guess if not available) +- **metadata**: source-specific metadata. ## License Information
@@ -84,32 +59,3 @@ Stk. 2. Bestemmelsen i stk. 1 gælder ikke for værker, der fremtræder som selv
-### Dataset Statistics
-[plot: distribution of document length]
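The `create.py` above delegates its final quality checks to helpers imported from `dynaword.process_dataset`. A rough sketch of what two of these helpers plausibly do; the actual implementations live in the danish-dynaword repository and may differ in detail:

```py
# Rough sketch of the quality-check helpers used in create.py above; the real
# dynaword.process_dataset implementations may differ.
from datasets import Dataset

def remove_empty_texts(ds: Dataset) -> Dataset:
    # Drop documents whose text is empty or whitespace-only.
    return ds.filter(lambda x: len(x["text"].strip()) > 0)

def remove_duplicate_text(ds: Dataset) -> Dataset:
    # Keep the first occurrence of each unique text. Single-process only,
    # since the filter closure carries state across examples.
    seen: set[str] = set()

    def is_first(example) -> bool:
        if example["text"] in seen:
            return False
        seen.add(example["text"])
        return True

    return ds.filter(is_first)

ds = Dataset.from_list([{"text": "a"}, {"text": " "}, {"text": "a"}])
print(remove_duplicate_text(remove_empty_texts(ds))["text"])  # ['a']
```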
- - - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -```
diff --git a/data/retsinformationdk/retsinformationdk.parquet b/data/retsinformationdk/retsinformationdk.parquet index b0420f53d5d265257fa41190a9f9644ae245d5f3..72d4d1a128bd80e4fa5e4c81da0c070741f0d314 100644 --- a/data/retsinformationdk/retsinformationdk.parquet +++ b/data/retsinformationdk/retsinformationdk.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:191bab8a3e7ae419394a622b74ae0fe64e9b5033066eeab4a3b3d2960153d48a -size 1017748370 +oid sha256:69df3e71d482c746854535710ffb57c9ba3c9ac633931222e8be84d0e67cc22c +size 651256719
diff --git a/data/retspraksis/descriptive_stats.json b/data/retspraksis/descriptive_stats.json deleted file mode 100644 index dbaedf31b415b0f68c055ed0d160ae0689f1e764..0000000000000000000000000000000000000000 --- a/data/retspraksis/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 4362, - "number_of_tokens": 56263026, - "min_length_tokens": 298, - "max_length_tokens": 979662, - "number_of_characters": 201907995, - "min_length_characters": 892, - "max_length_characters": 6583837 -} \ No newline at end of file
diff --git a/data/retspraksis/images/dist_document_length.png b/data/retspraksis/images/dist_document_length.png deleted file mode 100644 index e243e9d91ab445750ebe2c0c6c9bd9637dc868c6..0000000000000000000000000000000000000000 --- a/data/retspraksis/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a8eb1c7c9b456fad5b8f8ab4a4ccac284f3d163f32b8f2f84d29d69eebd8681e -size 558534
diff --git a/data/retspraksis/retspraksis.md b/data/retspraksis/retspraksis.md index c918a5d5cff5360c4b2df508ce517e8282363b26..17b45ae250fc5a961ae04d84f1ce88fd26784931 100644 --- a/data/retspraksis/retspraksis.md +++ b/data/retspraksis/retspraksis.md @@ -1,96 +1,56 @@ --- pretty_name: retspraksis (Danish legal information) language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Legal + - language-modeling --- - -# Dataset Card for retspraksis - - -Case law or judicial practice in Denmark, derived from [Retspraksis](https://da.wikipedia.org/wiki/Retspraksis). - - -It encompasses the body of legal decisions made by Danish courts, which play a significant role in interpreting and applying the law.
- - +# Dataset Card for retspraksis (Danish legal information) ## Dataset Description - - - -- **Number of samples**: 4.36K -- **Number of tokens (Llama 3)**: 56.26M -- **Average document length in tokens (min, max)**: 12.90K (298, 979.66K) - - - - -## Dataset Structure +- **Number of records:** 4413 +- **Languages:** Danish +## Dataset Sturcture An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "retspraksis_517", - "text": " højesterets dom\n afsagt torsdag den 6. december [...]", - "source": "retspraksis", - "added": "2020-09-24", - "created": "2000-01-01, 2022-01-01", - "token_count": 10838 + 'text': 'højesterets dom + afsagt tor', + 'source': 'retspraksis', + 'id': 'retspraksis_517', + 'added': '2020-09-24', + 'created': '2000-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Legal', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'retspraksis (Danish legal information)' + } } ``` -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - +## Data Fields +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp ai2 acquired this data. +- **created**": timestamp when original document was created (best-guess if not available) +- **metadata**: source-specific metadata. -### Dataset Statistics +## License Information +
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code
+
+CC0 1.0 Universal
-
-[deleted figure: distribution of document length (images/dist_document_length.png)]
-
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-    title = {{The Danish Gigaword Corpus}},
-    author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-    year = 2021,
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-    publisher = {NEALT}
-}
-```
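The card above documents a flat per-document schema (`id`, `text`, `source`, `added`, `created`, `token_count`). A minimal sketch of reading those fields with the `datasets` library; the Hub id `danish-foundation-models/danish-dynaword` and the `train` split are assumptions based on the repository links in this diff:

```python
# Sketch: load the collection and keep only documents from the retspraksis source.
from datasets import load_dataset

ds = load_dataset("danish-foundation-models/danish-dynaword", split="train")
retspraksis = ds.filter(lambda row: row["source"] == "retspraksis")

sample = retspraksis[0]
print(sample["id"], sample["added"], sample["created"])
print(sample["text"][:100])  # first 100 characters of the document
```

Filtering on the `source` column is used here instead of a named subset, since the cards state that per-source filtering by dataset name is still possible after the restructuring.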
diff --git a/data/retspraksis/retspraksis.parquet b/data/retspraksis/retspraksis.parquet index 0cbf6d00f7ab416da50cdb02d4723760b0cbf474..2420329e171f8f9c654d6679ad614c6a455d287e 100644 --- a/data/retspraksis/retspraksis.parquet +++ b/data/retspraksis/retspraksis.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f5c26d7e176da5d28f187885e05671ed549ba9ba653a3b86dafb1752807aed19 -size 86410527 +oid sha256:28f86c894204d6c1348a5fdfae7b69d1d355ba311e42d70fd669d52138b95d3a +size 87674092 diff --git a/data/skat/descriptive_stats.json b/data/skat/descriptive_stats.json deleted file mode 100644 index 2dea8a5cbcbd5eead293e0ffc96663394b16aa82..0000000000000000000000000000000000000000 --- a/data/skat/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 14715, - "number_of_tokens": 122113781, - "min_length_tokens": 2, - "max_length_tokens": 175223, - "number_of_characters": 353560210, - "min_length_characters": 1, - "max_length_characters": 517882 -} \ No newline at end of file diff --git a/data/skat/images/dist_document_length.png b/data/skat/images/dist_document_length.png deleted file mode 100644 index 5036f8b2577a994d7a57317c5027b6068993ef2a..0000000000000000000000000000000000000000 --- a/data/skat/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:71a06d2079468d93a4aa6418f0bf08ae520e108bc7a3ba37f0d8aca12b647067 -size 540667 diff --git a/data/skat/skat.md b/data/skat/skat.md index 53b0cbfbd789c0f23a839628d014d13c6dcb8187..7ccc4a3a5dc2a7d17b097802fef445b054d2292f 100644 --- a/data/skat/skat.md +++ b/data/skat/skat.md @@ -1,92 +1,55 @@ --- -pretty_name: skat.dk +pretty_name: Skat (Danish tax authority) language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 10k-100k + - 10k-100k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Legal + - language-modeling --- - -# Dataset Card for skat.dk - - -Skat is the Danish tax authority. This dataset contains content from its website skat.dk. - - - +# Dataset Card for Skat (Danish tax authority) ## Dataset Description - - - -- **Number of samples**: 14.71K -- **Number of tokens (Llama 3)**: 122.11M -- **Average document length in tokens (min, max)**: 8.30K (2, 175.22K) - - - - -## Dataset Structure +- **Number of records:** 14716 +- **Languages:** Danish +## Dataset Sturcture An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "skat_SKM2010.712.SKAT", - "text": "Andelsboligforeningers levering af brugsrettigheder til andelshaverne mod betaling af boligafgift si[...]", - "source": "skat", - "added": "2020-10-01", - "created": "2000-01-01, 2022-01-01", - "token_count": 1717 + 'text': 'Andelsboligforeningers levering af brugsrettighede', + 'source': 'skat', + 'id': 'skat_SKM2010.712.SKAT', + 'added': '2020-10-01', + 'created': '2000-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Legal', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'Skat (Danish tax authority)' + } } ``` -### Data Fields +## Data Fields -An entry in the dataset consists of the following fields: +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp ai2 acquired this data. 
+- **created**: timestamp when original document was created (best-guess if not available)
+- **metadata**: source-specific metadata.

-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer.
-
+## License Information
+<details>
+<summary>
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code
+
+CC0 1.0 Universal
-
-[deleted figure: distribution of document length (images/dist_document_length.png)]
-
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-    title = {{The Danish Gigaword Corpus}},
-    author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-    year = 2021,
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-    publisher = {NEALT}
-}
-```
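The `descriptive_stats.json` files removed in this diff all share one fixed schema. A rough sketch of recomputing the same numbers for one source, assuming the `token_count` column from the deleted field list above is still present:

```python
# Recompute the fields of a descriptive_stats.json file for the skat source.
from datasets import load_dataset

ds = load_dataset("danish-foundation-models/danish-dynaword", split="train")
skat = ds.filter(lambda row: row["source"] == "skat")

tokens = skat["token_count"]
chars = [len(text) for text in skat["text"]]

stats = {
    "number_of_samples": len(skat),
    "number_of_tokens": sum(tokens),
    "min_length_tokens": min(tokens),
    "max_length_tokens": max(tokens),
    "number_of_characters": sum(chars),
    "min_length_characters": min(chars),
    "max_length_characters": max(chars),
}
print(stats)
```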
diff --git a/data/skat/skat.parquet b/data/skat/skat.parquet index 71b7b57eed87a09b1b9db81684da062a818822cc..9dea70ee9b8778ea615cfe45cadbb09857ca98b6 100644 --- a/data/skat/skat.parquet +++ b/data/skat/skat.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:c48c02ac76ec53a9979d5abfd2112878b88cb7814d6916fe2a933f7d225118ec -size 164771689 +oid sha256:5f87f38f90553725c889080b3def8e24dadd3b2eaee28b43bae2a19493cf2143 +size 165069920 diff --git a/data/spont/descriptive_stats.json b/data/spont/descriptive_stats.json deleted file mode 100644 index 7638f2953bc56a8bf80701e83fcbf899ed464823..0000000000000000000000000000000000000000 --- a/data/spont/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 411, - "number_of_tokens": 1555942, - "min_length_tokens": 85, - "max_length_tokens": 14025, - "number_of_characters": 4001824, - "min_length_characters": 183, - "max_length_characters": 37603 -} \ No newline at end of file diff --git a/data/spont/images/dist_document_length.png b/data/spont/images/dist_document_length.png deleted file mode 100644 index 2514880333e173dcf77069cdf2da5dd4b44331a3..0000000000000000000000000000000000000000 --- a/data/spont/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0ffa6dc2bbe575109093dfa9fb46129625dfc88ff87082e8eb2c8bf0749b2598 -size 530053 diff --git a/data/spont/spont.md b/data/spont/spont.md index 3374049f9c596194e9c67873e0c922ac14239905..169eaf82722e41ab47d0e32041f035749c76a46e 100644 --- a/data/spont/spont.md +++ b/data/spont/spont.md @@ -1,111 +1,58 @@ --- pretty_name: Spontaneous speech language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Conversation -- Spoken + - language-modeling --- - # Dataset Card for Spontaneous speech - - -Conversational samples collected as a part of research projects at Aarhus University. - - - -The conversational corpus included originates from interdisciplinary research conducted within the [Interacting Minds Centre](https://interactingminds.au.dk), -and [the Puzzle of Danish project](https://projects.au.dk/the-puzzle-of-danish/) at Aarhus University. Transcribed Danish speech is generally a rare kind of data, -and spontaneous speech especially so; these manually transcribed conversations thus form a valuable resource. Spontaneous and pseudo-spontaneous conversations -come from various contexts, e.g., getting to know each other, solving a puzzle together, or making joint decisions. The participants have agreed on -releasing anonymized transcripts of their conversations. All conversations involve two speakers, sometimes conversing face-to-face, sometimes via a chat tool. -Speech is transcribed post-hoc by native speakers. Studies published relying on this data include -[Fusaroli et al. (2012)](https://journals.sagepub.com/doi/10.1177/0956797612436816), -[Dideriksen et al. (2019)](https://pure.au.dk/ws/portalfiles/portal/167670567/Dideriksen_et_al..pdf), and -[Tylén et al. (2016)](https://pure.au.dk/ws/portalfiles/portal/101787937/The_Social_Route_To_Abstraction.pdf). 
-
-
-
## Dataset Description
-
-
-
-- **Number of samples**: 411
-- **Number of tokens (Llama 3)**: 1.56M
-- **Average document length in tokens (min, max)**: 3.79K (85, 14.03K)
-
-
-
-
-## Dataset Structure
+- **Number of records:** 411
+- **Languages:** Danish
+## Dataset Structure
An example from the dataset looks as follows.
-
-
-
-```py
-{
-    "id": "spont_PuzzleOfDanish132",
-    "text": "Taler 6: mm\nTaler 7: er du klar?\nTaler 6: ja\nTaler 7: så er spørgsmålet om vi skal- om det er sådan [...]",
-    "source": "spont",
-    "added": "2020-01-21",
-    "created": "2019-01-01, 2020-01-01",
-    "token_count": 3902
-}
-```
+```yaml
+{
+    'text': 'Taler 6: mm
+Taler 7: er du klar?
+Taler 6: ja
+Taler',
+    'source': 'spont',
+    'id': 'spont_PuzzleOfDanish132',
+    'added': '2020-01-21',
+    'created': '2019-01-01, 2020-01-01',
+    'metadata': {
+        'domain': 'Conversation',
+        'license': 'Creative Commons Legal Code
+
+CC0 1.0 Universal',
+        'source-pretty': 'Spontaneous speech'
+    }
+}
+```
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer.
-
+## Data Fields
+
+- **id**: source-specific identifier.
+- **text**: textual content of the document.
+- **source**: source of the data.
+- **added**: timestamp ai2 acquired this data.
+- **created**: timestamp when original document was created (best-guess if not available)
+- **metadata**: source-specific metadata.
-
-### Dataset Statistics
+
+## License Information
+<details>
+<summary>
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code
+
+CC0 1.0 Universal
-
-[deleted figure: distribution of document length (images/dist_document_length.png)]
-
-
-
-### Formatting
-
-To represent speakers in the text files, prefix each turn with “TALER 1:” (substituting whatever ID is appropriate).
-Note: there is no space before the colon; use one space after the colon. Speaker IDs should be consistent
-throughout all documents in a section. Speaker IDs need only be unique within a section, not universally.
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-    title = {{The Danish Gigaword Corpus}},
-    author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-    year = 2021,
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-    publisher = {NEALT}
-}
-```
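The deleted `### Formatting` note above fully specifies the turn syntax for this corpus, so the speaker structure can be recovered mechanically. A small sketch of splitting a `spont` document into (speaker, utterance) pairs; matching is case-insensitive because the card writes `TALER 1:` while the sample texts use `Taler 6:`:

```python
# Parse "Taler N: utterance" turns out of a spont document.
import re

TURN = re.compile(r"^(Taler \d+): (.*)$", re.IGNORECASE)

def parse_turns(text: str) -> list[tuple[str, str]]:
    turns = []
    for line in text.split("\n"):
        match = TURN.match(line)
        if match:
            turns.append((match.group(1), match.group(2)))
    return turns

sample = "Taler 6: mm\nTaler 7: er du klar?\nTaler 6: ja"
print(parse_turns(sample))
# [('Taler 6', 'mm'), ('Taler 7', 'er du klar?'), ('Taler 6', 'ja')]
```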
diff --git a/data/spont/spont.parquet b/data/spont/spont.parquet index 55fc637f20aa415edd18c3bdc2df9c85cda64739..5825430938423600e6f901d7316bcd901e4cf08c 100644 --- a/data/spont/spont.parquet +++ b/data/spont/spont.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:a449836c80c99f439fd8b43205f164c2f16ffe8aa7793026a19200cf6aedab40 -size 1806926 +oid sha256:0ac515b1dedc78fb9123bffbab2cf3c0fe1e126a070ad342d7d0c707096e838b +size 1814921 diff --git a/data/synne/descriptive_stats.json b/data/synne/descriptive_stats.json deleted file mode 100644 index 128032e001d639d4c77bd45beb4a58e999a761f1..0000000000000000000000000000000000000000 --- a/data/synne/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 177, - "number_of_tokens": 52017, - "min_length_tokens": 128, - "max_length_tokens": 891, - "number_of_characters": 138484, - "min_length_characters": 333, - "max_length_characters": 2213 -} \ No newline at end of file diff --git a/data/synne/images/dist_document_length.png b/data/synne/images/dist_document_length.png deleted file mode 100644 index 27362bc9e79756600fa8a2f176aecb51074a7e81..0000000000000000000000000000000000000000 --- a/data/synne/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:104c50abc42b7227d52eba2591abee32c469e5b7a1ba3cb4c3190e43b2bed250 -size 530261 diff --git a/data/synne/synne.md b/data/synne/synne.md index 2bdad7b260527c4d3e06b669fa05e475a862e631..db58444d9abfd3b5261b84516a41e5cfa987020e 100644 --- a/data/synne/synne.md +++ b/data/synne/synne.md @@ -1,92 +1,55 @@ --- -pretty_name: Synnejysk Forening +pretty_name: Synderjysk (Danish dialect) language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Other + - language-modeling --- - -# Dataset Card for synnejysk Forening - - -Dataset collected from [synnejysk forening's website](https://www.synnejysk.dk), covering the Danish dialect sønderjysk. - - - +# Dataset Card for Synderjysk (Danish dialect) ## Dataset Description - - - -- **Number of samples**: 177 -- **Number of tokens (Llama 3)**: 52.02K -- **Average document length in tokens (min, max)**: 293.8813559322034 (128, 891) - - - - -## Dataset Structure +- **Number of records:** 178 +- **Languages:** Danish +## Dataset Sturcture An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "synne_forening_0140", - "text": "Mangeægskage Hent printvenligt dokument her – Klik her Som medlem af Æ Synnejysk Forening er du med [...]", - "source": "synne", - "added": "2020-06-26", - "created": "2000-01-01, 2022-01-01", - "token_count": 144 + 'text': 'Mangeægskage Hent printvenligt dokument her – Klik', + 'source': 'synne', + 'id': 'synne_forening_0140', + 'added': '2020-06-26', + 'created': '2000-01-01, 2022-01-01', + 'metadata': { + 'domain': 'Other', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'Synderjysk (Danish dialect)' + } } ``` -### Data Fields +## Data Fields -An entry in the dataset consists of the following fields: +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp ai2 acquired this data. 
+- **created**: timestamp when original document was created (best-guess if not available)
+- **metadata**: source-specific metadata.

-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer.
-
+## License Information
+<details>
+<summary>
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code
+
+CC0 1.0 Universal
-
-[deleted figure: distribution of document length (images/dist_document_length.png)]
-
-
-## Additional Information
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-    title = {{The Danish Gigaword Corpus}},
-    author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-    year = 2021,
-    booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-    publisher = {NEALT}
-}
-```
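Both card formats above reference token counts ("Number of tokens (Llama 3)" and the per-document `token_count` field). A hedged sketch of how such a count can be reproduced; the exact checkpoint name is an assumption, and the official Llama 3 repositories on the Hub are gated:

```python
# Count tokens the way the cards describe: with a Llama 3 (8B) tokenizer,
# excluding special tokens such as <|begin_of_text|>.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B")

def token_count(text: str) -> int:
    return len(tokenizer(text, add_special_tokens=False)["input_ids"])

print(token_count("Mangeægskage Hent printvenligt dokument her"))
```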
diff --git a/data/synne/synne.parquet b/data/synne/synne.parquet index e9ad25520df881d5fca65bd6029e8aa4dcdcb897..42260ffe3e7a2731adf462ef84d5e071f8469812 100644 --- a/data/synne/synne.parquet +++ b/data/synne/synne.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0f6028ada9958c4b9680ebce641fd0a0abc8c43c2e581b20d6f655673909eed1 -size 73823 +oid sha256:701bf010bca88dd4722ffa72404b91e24703bd9552003371771bf1823dc58138 +size 77042 diff --git a/data/tv2r/descriptive_stats.json b/data/tv2r/descriptive_stats.json deleted file mode 100644 index aafd73adfe1a023dab39440aba42d744e4c36c36..0000000000000000000000000000000000000000 --- a/data/tv2r/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 49134, - "number_of_tokens": 21670824, - "min_length_tokens": 16, - "max_length_tokens": 5267, - "number_of_characters": 64040849, - "min_length_characters": 31, - "max_length_characters": 16140 -} \ No newline at end of file diff --git a/data/tv2r/images/dist_document_length.png b/data/tv2r/images/dist_document_length.png deleted file mode 100644 index 81fd23c8309b399f385de189c567e408322c2352..0000000000000000000000000000000000000000 --- a/data/tv2r/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5f406f6bdf3278dccc4b83cff96473661275ade1a94194ebb78915615948669f -size 533630 diff --git a/data/tv2r/tv2r.md b/data/tv2r/tv2r.md index 45bac2541b860a5ccc27b14c8c5c2a1f199e4e08..e2fab12cf62485ea1440a30459980c5c2d8e69f8 100644 --- a/data/tv2r/tv2r.md +++ b/data/tv2r/tv2r.md @@ -1,104 +1,55 @@ --- -pretty_name: TV 2 Radio +pretty_name: TV 2 Radio (Danish news) language: -- da + - da license: cc-by-sa-4.0 -license_name: CC-BY-SA 4.0 +license_name: Creative Commons Attribution Share Alike 4.0 size_categories: -- 10k-100k + - 10k-100k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- News + - language-modeling --- - -# Dataset Card for TV 2 Radio - +# Dataset Card for TV 2 Radio (Danish news) ## Dataset Description - - -Contemporary Danish newswire articles published between 2010 and 2019. - - - -It contains articles of regional interest, written following editorial standards. This section’s value is in both its temporal variation, covering a decade of events, and its spatial variation, covering many local events across most of Denmark (TV2 Bornholm is excluded). As a result of local event coverage, the section contains many locally relevant named entities, which might otherwise not be present in a dataset of national news. - - - -- **Number of samples**: 49.13K -- **Number of tokens (Llama 3)**: 21.67M -- **Average document length in tokens (min, max)**: 441.055562339724 (16, 5.27K) - - - - -## Dataset Structure +- **Number of records:** 49137 +- **Languages:** Danish +## Dataset Sturcture An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "tv2r_92548", - "text": "Storken er landet\n02 april 2017 kl. 17.58\nSøndag aften er storken Annika landet i sin rede ved Smeda[...]", - "source": "tv2r", - "added": "2019-11-13", - "created": "2015-01-01, 2020-01-01", - "token_count": 465 + 'text': 'Storken er landet +02 april 2017 kl. 
17.58
+Søndag a',
+    'source': 'tv2r',
+    'id': 'tv2r_92548',
+    'added': '2019-11-13',
+    'created': '2015-01-01, 2020-01-01',
+    'metadata': {
+        'domain': 'News',
+        'license': 'The owner of this content is TV2 Regionerne, Denmark.
+Creative Commons Attribution 4.0 International',
+        'source-pretty': 'TV 2 Radio (Danish news)'
+    }
+}
+```
-### Data Fields
-
-An entry in the dataset consists of the following fields:
-
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): A date for when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer.
-
+## Data Fields
+- **id**: source-specific identifier.
+- **text**: textual content of the document.
+- **source**: source of the data.
+- **added**: timestamp ai2 acquired this data.
+- **created**: timestamp when original document was created (best-guess if not available)
+- **metadata**: source-specific metadata.
## License Information
<details>
<summary>
-CC-BY-SA 4.0 +Creative Commons Attribution Share Alike 4.0

The owner of this content is TV2 Regionerne, Denmark. Creative Commons Attribution 4.0 International

-
-### Dataset Statistics
-
-[deleted figure: distribution of document length (images/dist_document_length.png)]
-
- - - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bash -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` diff --git a/data/tv2r/tv2r.parquet b/data/tv2r/tv2r.parquet index 99c28c7da8dc6572be63254afe8de828816eb437..e1da3a3df31c9917a2308695576ac54883137230 100644 --- a/data/tv2r/tv2r.parquet +++ b/data/tv2r/tv2r.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:a94fc259d4049515e6ed906f7a195676977fda86bd69e9d1a50de0609aaeafce -size 40465603 +oid sha256:e5cc87b9de1c11ef580d939d1d877b5553d3c75aa33d0f2e280986b8787900a5 +size 40686259 diff --git a/data/wiki-comments/create.py b/data/wiki-comments/create.py deleted file mode 100644 index edb77ac5478a819f2a25f45964cc40909395d603..0000000000000000000000000000000000000000 --- a/data/wiki-comments/create.py +++ /dev/null @@ -1,137 +0,0 @@ -# /// script -# requires-python = "==3.12" -# dependencies = [ -# "datasets==3.2.0", -# "dynaword", -# "fasttext", -# "huggingface_hub" -# ] -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "6b3822fd6965dda59ae361da99c19b5c56c1263f" } -# /// -""" -This script downloads and cleans Wikipedia, it only keeps the user comments. 
-""" - -import html -import json -import os - -import fasttext -import requests -from datasets import Dataset -from huggingface_hub import hf_hub_download - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - - -def run_cmd(cmd): - print(cmd) - os.system(cmd) - - -def download_data(lang, date): - filename = lang + "wiki-" + date + "-pages-articles.xml.bz2" - if not os.path.isfile(filename): - url = "https://dumps.wikimedia.org/" + lang + "wiki/" + date + "/" + filename - response = requests.get(url) - - with open(filename, "wb") as file: - file.write(response.content) - - return filename - - -def install_wikiextractor(): - if not os.path.isdir("wikiextractor"): - cmd = "git clone https://github.com/robvanderg/wikiextractor.git" - run_cmd(cmd) - - -def run_wikiextractor(in_path, out_path): - # clean the data - if not os.path.isdir("wikiextractor/" + out_path): - cmd = ( - "cd wikiextractor && python3 -m wikiextractor.WikiExtractor ../" - + in_path - + " -o " - + out_path - + " --get_misc --json && cd ../ " - ) - run_cmd(cmd) - - -def read_and_clean(path): - # Load fasttext model - model_path = hf_hub_download( - repo_id="facebook/fasttext-language-identification", filename="model.bin" - ) - fasttext_model = fasttext.load_model(model_path) - - comment_id = 0 - all_rows = [] - for root, dirs, files in os.walk(path, topdown=True): - for file in files: - path = os.path.join(root, file) - for line in open(path): - linedata = json.loads(line) - title = linedata["title"] - category = title.split(":")[0] - if category == "Wikipedia": - if title.startswith("Wikipedia:Dagens "): - continue - id = "wikicomment_" + str(comment_id) - comment_id += 1 - else: # There is more data, but we just want to comments for now - continue - source = "wiki_misc" - # TODO add linedata['url'] somewhere? 
- text = html.unescape(linedata["text"]) - lines = line.split("\n") # noqa - filtered_text = "" - for line in text.split("\n"): - if "{{}}" in text: # unresolved templates - continue - - lang_pred = fasttext_model.predict(line) - if lang_pred[0][0] == "__label__dan_Latn" and lang_pred[1][0] > 0.5: - filtered_text += line.replace("[", "").replace("]", "") + "\n" - added = "2025-07-21" - created = "2002-02-01, 2025-07-20" - row = { - "id": id, - "text": filtered_text, - "source": source, - "added": added, - "created": created, - } - all_rows.append(row) - return all_rows - - -if __name__ == "__main__": - date = "20250720" # obtained from https://dumps.wikimedia.org/dawiki/ - lang = "da" - bz2_path = download_data(lang, date) - - install_wikiextractor() - data_folder = lang + "wiki-misc-" + date - run_wikiextractor(bz2_path, data_folder) - - full_data = read_and_clean("wikiextractor/" + data_folder) - print(len(full_data)) - exit(1) - - ds = Dataset.from_list(full_data) - - ds = remove_empty_texts(ds) - ds = remove_duplicate_text(ds) - ds = add_token_count(ds) - ds = ensure_column_order(ds) - - ds.to_parquet("data/wiki-comments/wiki-comments.parquet") diff --git a/data/wiki-comments/descriptive_stats.json b/data/wiki-comments/descriptive_stats.json deleted file mode 100644 index 71200a603cf6516c28417eae2a17b0956ebed10d..0000000000000000000000000000000000000000 --- a/data/wiki-comments/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 11592, - "number_of_tokens": 6135139, - "min_length_tokens": 4, - "max_length_tokens": 27781, - "number_of_characters": 18190596, - "min_length_characters": 6, - "max_length_characters": 80999 -} \ No newline at end of file diff --git a/data/wiki-comments/images/dist_document_length.png b/data/wiki-comments/images/dist_document_length.png deleted file mode 100644 index 4d2ac6ac98d33e974681074e0c9fd6e41fe6348f..0000000000000000000000000000000000000000 --- a/data/wiki-comments/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c208772863c1e8558eefa047fc837f728deba62993f33768d09060ee1ced1fee -size 179122 diff --git a/data/wiki-comments/wiki-comments.md b/data/wiki-comments/wiki-comments.md deleted file mode 100644 index 79ec57c284f5ed809b2c8036ed7166861423e051..0000000000000000000000000000000000000000 --- a/data/wiki-comments/wiki-comments.md +++ /dev/null @@ -1,100 +0,0 @@ ---- -pretty_name: Wikipedia Comments -language: -- da -license: cc0-1.0 -license_name: CC-0 -size_categories: -- 100k-1M -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -domains: -- Encyclopedic ---- - -# Dataset Card for Wikipedia Comments - - -Text from the comments sections of the Danish Wikipedia. - - - -You can read more about Danish Wikipedia on their [about](https://da.wikipedia.org/wiki/Hj%C3%A6lp:Om) page. This dataset contains meta-discussion as opposed to the direct pages. This includes discussions about contents of specific pages, but also questions about how to achieve certain things. An example page can be found on: https://da.wikipedia.org/wiki/Wikipedia:Landsbybr%C3%B8nden/Vejledning_efterlyses. For this specific version, the Wikipedia downloads of 20250720 have been used, this can easily be updated in the `create.py` script. 
- -## Dataset Description - - - -- **Number of samples**: 11.59K -- **Number of tokens (Llama 3)**: 6.14M -- **Average document length in tokens (min, max)**: 529.26 (4, 27.78K) - - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "wikicomment_0", - "text": "## Vejledning efterlyses.\nJeg har ledt og ledt alle steder efter tip om, hvordan man kan bruge et bi[...]", - "source": "wiki_misc", - "added": "2025-07-21", - "created": "2002-02-01, 2025-07-20", - "token_count": 385 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer - - -### Dataset Statistics - - -

-[deleted figure: distribution of document length (images/dist_document_length.png)]
-
- - - - -## Additional Information - -This dataset is collected using an adapted version of the [WikiExtractor](https://github.com/attardi/wikiextractor). Rob van der Goot created a fork that allows for extracting additional text from Wiki's. The fork can be found here: [WikiExtractor](https://github.com/robvanderg/wikiextractor.git). - -After inspection of the different outputs, there are multiple categories of files, which can most easily be distinguished through the title field. Below, I list the different categories, their size (number of pages), and what they seem to contain after a manual inspection. - -``` - 71472 Kategori: category overview pages - 19992 Wikipedia: Comments, but also daily articles - 2379 Portal: Also monthly articles, and some lists/calendars - 1360 MediaWiki: About files, contains almost no natural language - 726 Modul: technical stuff, contains almost no (Danish) text - 171 Hjælp: help pages; info and comments -``` - -In the current version of the dataset, we used the titles starting with `Wikipedia:` , and remove the daily articles by leaving out titles starting with "Wikipedia:Dagens". - -In this data we include comments where people discuss things like: content of pages, writing style, which pages/information to include/exclude, etc. It also includes pages written for people that contribute to Wikipedia. - - -## Opportunities for Improvement -The wikiextractor outputs some incomplete sentences, where entities seem to be missing. This is a known issue: https://github.com/attardi/wikiextractor/issues/33 , but in the Wikipedia section of DynaWord, other extractors seems to have given better results, so it would be nice if they could be adapted to extract comments as well. We have choosen to remove entire lines when we know that words are missing, to avoid disrupting the context. - -Language filtering is done with pre-trained fastText, which is definitely not a perfect solution (but the majority of text should already be Danish, and quality of English-Danish should be ok). - diff --git a/data/wiki-comments/wiki-comments.parquet b/data/wiki-comments/wiki-comments.parquet deleted file mode 100644 index 1b2061e73f71f39cf5bba7d622567a0761035fcd..0000000000000000000000000000000000000000 --- a/data/wiki-comments/wiki-comments.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:64c8b39dcd96ef6ac2bcfb5403cf9cd74f2ee17b954de3478f543922ee8dea5e -size 10828848 diff --git a/data/wiki/wiki.md b/data/wiki/wiki.md new file mode 100644 index 0000000000000000000000000000000000000000..c21353f947e9f30d5e2f258539c13d3e9cd3edc0 --- /dev/null +++ b/data/wiki/wiki.md @@ -0,0 +1,55 @@ +--- +pretty_name: Wikipedia +language: + - da +license: cc0-1.0 +license_name: Creative Commons Zero v1.0 Universal +size_categories: + - 100k-1M +task_categories: + - text-generation + - fill-mask +task_ids: + - language-modeling +--- +# Dataset Card for Wikipedia +## Dataset Description +- **Number of records:** 264502 +- **Languages:** Danish +## Dataset Sturcture +An example from the dataset looks as follows. +```yaml +{ + 'text': 'Vimoutiers er en kommune i departementet Orne i Ba', + 'source': 'wiki', + 'id': 'wiki_366127', + 'added': '2021-03-28', + 'created': '2019-01-01, 2021-01-01', + 'metadata': { + 'domain': 'Wiki & Books', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'Wikipedia' + } +} +``` + +## Data Fields + +- **id**: source-specific identifier. +- **text**: textual content of the document. 
+- **source**: source of the data.
+- **added**: timestamp ai2 acquired this data.
+- **created**: timestamp when original document was created (best-guess if not available)
+- **metadata**: source-specific metadata.
+
+## License Information
+<details>
+<summary>
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code
+
+CC0 1.0 Universal
+
+
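The `created` field documented in these cards is a single string holding a date range, e.g. `"2019-01-01, 2021-01-01"` in the examples above. A small sketch of splitting it into proper dates:

```python
# Parse the "start, end" ISO date range stored in the created field.
from datetime import date

def parse_created(created: str) -> tuple[date, date]:
    start, end = (part.strip() for part in created.split(","))
    return date.fromisoformat(start), date.fromisoformat(end)

print(parse_created("2019-01-01, 2021-01-01"))
# (datetime.date(2019, 1, 1), datetime.date(2021, 1, 1))
```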
diff --git a/data/wiki/wiki.parquet b/data/wiki/wiki.parquet new file mode 100644 index 0000000000000000000000000000000000000000..3950cd4f3efdcf7dbb0c4c16354430fff523feba --- /dev/null +++ b/data/wiki/wiki.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:41bb02c5b10290746b00750db69c565bfe25fda2529efcc603f108d820dc6c13 +size 242917206 diff --git a/data/wikibooks/create.py b/data/wikibooks/create.py deleted file mode 100644 index 4592717fbcb556b15fd9a44e27d1834c42568bff..0000000000000000000000000000000000000000 --- a/data/wikibooks/create.py +++ /dev/null @@ -1,251 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets", -# "mediawiki-dump", -# "mwparserfromhell", -# "pandas", -# "requests", -# "tqdm", -# "transformers", -# "dynaword" -# ] -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "00e7f2aee7f7ad2da423419f77ecbb9c0536de0d" } -# /// -import bz2 -import datetime -import json -import logging -import os -import re -import subprocess -import sys -import threading - -from mediawiki_dump.dumps import IteratorDump -from mediawiki_dump.reader import DumpReaderArticles - -from mwparserfromhell import parse - -import requests -from tqdm import tqdm -import pandas as pd - -from datasets import Dataset -from typing import Tuple - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) -logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" -) -source = "wikibooks" - - -class WtfNodeBridge: - """ - Persistent Node bridge to wtf_wikipedia. - Call .parse(wikitext, lang=None) -> (text, is_redirect) - Remember to call .close() when done. 
- """ - - def __init__( - self, node_script_path: str = "parser/wtf_bridge.js", node_cmd: str = "node" - ): - self.proc = subprocess.Popen( - [node_cmd, node_script_path], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - encoding="utf-8", - bufsize=1, - ) - self._lock = threading.Lock() - - # background thread to log stderr (helpful for debugging) - def _drain_stderr(p): - try: - for line in p.stderr: - logger.warning("wtf_node stderr: %s", line.rstrip()) - except Exception: - pass - - t = threading.Thread(target=_drain_stderr, args=(self.proc,), daemon=True) - t.start() - - def parse(self, wikitext: str, lang: str | None = None) -> Tuple[str, bool]: - if self.proc.poll() is not None: - raise RuntimeError("Node bridge process has exited") - payload = {"wikitext": wikitext} - if lang: - payload["lang"] = lang - line = json.dumps(payload, ensure_ascii=False) - with self._lock: - # write and flush a single JSON line - try: - self.proc.stdin.write(line + "\n") - self.proc.stdin.flush() - except BrokenPipeError as e: - raise RuntimeError("Broken pipe writing to node bridge") from e - - # read exactly one JSON line back - out_line = self.proc.stdout.readline() - if not out_line: - raise RuntimeError("No response from node bridge (it may have exited)") - res = json.loads(out_line) - if res.get("error"): - # choose to either raise or return empty text; here we raise - raise RuntimeError("Node bridge error: " + res["error"]) - return res.get("text", ""), bool(res.get("isRedirect", False)) - - def close(self): - try: - if self.proc.stdin: - self.proc.stdin.close() - except Exception: - pass - try: - self.proc.terminate() - self.proc.wait(timeout=3) - except Exception: - try: - self.proc.kill() - except Exception: - pass - - -def download_wiki_dump(url: str, file_path: str): - """ - Downloads a file from a URL with a progress bar. - - Args: - url (str): The URL of the file to download. - file_path (str): The local path to save the file. 
- """ - print(f"Downloading {url} to {file_path}...") - try: - with requests.get(url, stream=True) as r: - r.raise_for_status() - total_size = int(r.headers.get("content-length", 0)) - block_size = 8192 # 8 Kibibytes - - with ( - open(file_path, "wb") as f, - tqdm( - total=total_size, unit="B", unit_scale=True, desc="Downloading" - ) as pbar, - ): - for chunk in r.iter_content(chunk_size=block_size): - if chunk: # filter out keep-alive chunks - f.write(chunk) - pbar.update(len(chunk)) - logger.info("\nDownload complete.") - except requests.exceptions.RequestException as e: - logger.error(f"\nError downloading file: {e}") - sys.exit(1) - - -def get_content(file_name: str): - with bz2.open(file_name, mode="r") as fp: - yield from fp - - -def process_dump_to_parquet(bz2_file_path: str, parquet_file_path: str): - dump = IteratorDump(iterator=get_content(file_name=bz2_file_path)) - pages = DumpReaderArticles().read(dump) - articles = [] - today = datetime.datetime.now().strftime("%Y-%m-%d") - - bridge = WtfNodeBridge("parser/wtf_bridge.js") - try: - for page in tqdm(pages): - try: - plain_text, is_redirect = bridge.parse(page.content, lang="da") - except Exception as exc: - logger.warning( - "wtf parse failed for page %s: %s -- falling back to mwparserfromhell", - getattr(page, "title", ""), - exc, - ) - plain_text = parse(page.content).strip_code().strip() - is_redirect = plain_text.startswith("REDIRECT") - - if is_redirect: - continue - - # Additional cleanup if you like (wtf.text() already removes a lot) - plain_text = re.sub( - r"thumb(?:\|(?:left|right|center|\d+px)*)*\|[^\n]*", "", plain_text - ).strip() - - if len(plain_text) == 0: - logger.warning("Skipping empty article") - continue - - date = datetime.datetime.strptime( - page.timestamp, "%Y-%m-%dT%H:%M:%SZ" - ).strftime("%Y-%m-%d") - - articles.append( - { - "id": f"{source}_{page.page_id}", - "source": source, - "created": f"{date}, {date}", - "text": f"{page.title}\n{plain_text}", - "added": today, - } - ) - finally: - bridge.close() - - df = pd.DataFrame(articles) - ds = Dataset.from_pandas(df) - ds = add_token_count(ds) - ds = remove_empty_texts(ds) - ds = remove_duplicate_text(ds) - ds = ensure_column_order(ds) - ds.to_parquet(parquet_file_path, compression="snappy") - - -def main(): - """ - Main function to orchestrate the download and processing. - """ - # --- Configuration --- - # URL for the latest Danish Wikipedia articles dump - WIKI_DUMP_URL = f"https://dumps.wikimedia.org/da{source}/latest/da{source}-latest-pages-articles.xml.bz2" - - # Local file paths - DOWNLOADED_BZ2_FILE = f"tmp/da{source}-latest-pages-articles.xml.bz2" - OUTPUT_PARQUET_FILE = f"{source}.parquet" - - # --- Execution --- - # 1. Download the dump file - if not os.path.exists(DOWNLOADED_BZ2_FILE): - download_wiki_dump(WIKI_DUMP_URL, DOWNLOADED_BZ2_FILE) - else: - print(f"File '{DOWNLOADED_BZ2_FILE}' already exists. Skipping download.") - - # 2. Process the dump and save to Parquet - process_dump_to_parquet(DOWNLOADED_BZ2_FILE, OUTPUT_PARQUET_FILE) - - # 3. (Optional) Clean up the downloaded file - # If you want to keep the bz2 file, comment out the next line. 
- print(f"Cleaning up by removing '{DOWNLOADED_BZ2_FILE}'...") - os.remove(DOWNLOADED_BZ2_FILE) - - print("\nScript finished successfully.") - - -if __name__ == "__main__": - # Before running, make sure you have the required libraries installed: - # pip install requests mwparserfromhell pandas pyarrow - main() diff --git a/data/wikibooks/descriptive_stats.json b/data/wikibooks/descriptive_stats.json deleted file mode 100644 index 0cf94c4aaf3ebb451d9af17b3fdd59d10d309b53..0000000000000000000000000000000000000000 --- a/data/wikibooks/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 1734, - "number_of_tokens": 7632997, - "min_length_tokens": 8, - "max_length_tokens": 368880, - "number_of_characters": 21572592, - "min_length_characters": 21, - "max_length_characters": 1019370 -} \ No newline at end of file diff --git a/data/wikibooks/images/dist_document_length.png b/data/wikibooks/images/dist_document_length.png deleted file mode 100644 index 116795f9dacce4a776b003f46c4f5f4bf6f895d0..0000000000000000000000000000000000000000 --- a/data/wikibooks/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:535a84a77b35475811b1d08b6b9511a1764951d4a86e1839a4e2f9f262dac207 -size 547085 diff --git a/data/wikibooks/parser/package-lock.json b/data/wikibooks/parser/package-lock.json deleted file mode 100644 index 9969047897d97e95f057e2fa9c80647e6196718c..0000000000000000000000000000000000000000 --- a/data/wikibooks/parser/package-lock.json +++ /dev/null @@ -1,724 +0,0 @@ -{ - "name": "parser", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "parser", - "version": "1.0.0", - "license": "ISC", - "dependencies": { - "wtf_wikipedia": "^10.4.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@types/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", - "license": "MIT" - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", - "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", - "license": "MIT" - }, - "node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/camelcase": { 
- "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/camelcase-keys": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-7.0.2.tgz", - "integrity": "sha512-Rjs1H+A9R+Ig+4E/9oyB66UC5Mj9Xq3N//vcLf2WzgdTi/3gUu3Z9KoqmlrEG4VuuLK8wJHofxzdQXz/knhiYg==", - "license": "MIT", - "dependencies": { - "camelcase": "^6.3.0", - "map-obj": "^4.1.0", - "quick-lru": "^5.1.1", - "type-fest": "^1.2.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-5.0.1.tgz", - "integrity": "sha512-VfxadyCECXgQlkoEAjeghAr5gY3Hf+IKjKb+X8tGVDtveCjN+USwprd2q3QXBR9T1+x2DG0XZF5/w+7HAtSaXA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", - "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", - "license": "MIT", - "dependencies": { - "decamelize": "^1.1.0", - "map-obj": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize-keys/node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decamelize-keys/node_modules/map-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/find-up/node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hard-rejection": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", - "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/indent-string": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", - "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "license": "MIT" - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/isomorphic-unfetch": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz", - "integrity": "sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==", - "license": "MIT", - "dependencies": { - "node-fetch": "^2.6.1", - "unfetch": "^4.2.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": 
"sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "license": "MIT" - }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "license": "MIT" - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "license": "MIT", - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/map-obj": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", - "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/meow": { - "version": "10.1.5", - "resolved": "https://registry.npmjs.org/meow/-/meow-10.1.5.tgz", - "integrity": "sha512-/d+PQ4GKmGvM9Bee/DPa8z3mXs/pkvJE2KEThngVNOqtmljC6K7NMPxtc2JeZYTmpWb9k/TmxjeL18ez3h7vCw==", - "license": "MIT", - "dependencies": { - "@types/minimist": "^1.2.2", - "camelcase-keys": "^7.0.0", - "decamelize": "^5.0.0", - "decamelize-keys": "^1.1.0", - "hard-rejection": "^2.1.0", - "minimist-options": "4.1.0", - "normalize-package-data": "^3.0.2", - "read-pkg-up": "^8.0.0", - "redent": "^4.0.0", - "trim-newlines": "^4.0.2", - "type-fest": "^1.2.2", - "yargs-parser": "^20.2.9" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/min-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", - "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/minimist-options": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", - "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", - "license": "MIT", - "dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0", - "kind-of": "^6.0.3" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": 
"sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-exists": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", - "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/path-exists-cli": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-exists-cli/-/path-exists-cli-2.0.0.tgz", - "integrity": "sha512-qGr0A87KYCznmvabblxyxnzA/MtPZ28wH+4SCMP4tjTFAbzqwvs5xpUZExAYzq5OgHe5vIswzdH5iosCb8YF/Q==", - "license": "MIT", - "dependencies": { - "meow": "^10.1.1", - "path-exists": "^5.0.0" - }, - "bin": { - "path-exists": "cli.js" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "license": "ISC" - }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": 
"sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-6.0.0.tgz", - "integrity": "sha512-X1Fu3dPuk/8ZLsMhEj5f4wFAF0DWoK7qhGJvgaijocXxBmSToKfbFtqbxMO7bVjNA1dmE5huAzjXj/ey86iw9Q==", - "license": "MIT", - "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^3.0.2", - "parse-json": "^5.2.0", - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-8.0.0.tgz", - "integrity": "sha512-snVCqPczksT0HS2EC+SxUndvSzn6LRCwpfSvLrIfR5BKDQQZMaI6jPRC9dYvYFDRAuFEAnkwww8kBBNE/3VvzQ==", - "license": "MIT", - "dependencies": { - "find-up": "^5.0.0", - "read-pkg": "^6.0.0", - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/redent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-4.0.0.tgz", - "integrity": "sha512-tYkDkVVtYkSVhuQ4zBgfvciymHaeuel+zFKXShfDnFP5SyVEP7qo70Rf1jTOTCx3vGNAbnEi/xFkcfQVMIBWag==", - "license": "MIT", - "dependencies": { - "indent-string": "^5.0.0", - "strip-indent": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.22", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", - "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", - "license": "CC0-1.0" - }, - "node_modules/strip-indent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", - "integrity": 
"sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", - "license": "MIT", - "dependencies": { - "min-indent": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "license": "MIT" - }, - "node_modules/trim-newlines": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-4.1.1.tgz", - "integrity": "sha512-jRKj0n0jXWo6kh62nA5TEh3+4igKDXLvzBJcPpiizP7oOolUrYIxmVBG9TOtHYFHoddUk6YvAkGeGoSVTXfQXQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/unfetch": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/unfetch/-/unfetch-4.2.0.tgz", - "integrity": "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==", - "license": "MIT" - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "license": "BSD-2-Clause" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/wtf_wikipedia": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/wtf_wikipedia/-/wtf_wikipedia-10.4.0.tgz", - "integrity": "sha512-yRxTiBURj2LW5HWAe+T7bCV2x45C/qTqcknUTmInKmB9cmLSxR6Nh44rB9K+nfNiydtjc3HLHwYWxMuHZtpVSQ==", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "isomorphic-unfetch": "^3.1.0", - "path-exists-cli": "2.0.0" - }, - "bin": { - "wtf_wikipedia": "cli.js" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, - "node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": 
"sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - } - } -} diff --git a/data/wikibooks/parser/package.json b/data/wikibooks/parser/package.json deleted file mode 100644 index 090b8bdfa1172f7f314293c07f0b90c409788e41..0000000000000000000000000000000000000000 --- a/data/wikibooks/parser/package.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "parser", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "ISC", - "type": "commonjs", - "dependencies": { - "wtf_wikipedia": "^10.4.0" - } -} diff --git a/data/wikibooks/parser/wtf_bridge.js b/data/wikibooks/parser/wtf_bridge.js deleted file mode 100644 index a6630a92615d6098dcd85af8a7e9bcb0e790362d..0000000000000000000000000000000000000000 --- a/data/wikibooks/parser/wtf_bridge.js +++ /dev/null @@ -1,39 +0,0 @@ -// wtf_bridge.js -// Usage: node wtf_bridge.js -// Reads newline-delimited JSON from stdin: {"wikitext":"...","lang":"da"} -// Writes newline-delimited JSON to stdout: {"text":"...","isRedirect":false} or {"error":"..."} -const wtf = require('wtf_wikipedia'); -const readline = require('readline'); - -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout, - terminal: false -}); - -process.on('uncaughtException', (err) => { - // emit as JSON so Python can see it - try { - process.stdout.write(JSON.stringify({ error: String(err && err.stack || err) }) + '\n'); - } catch (e) {} - process.exit(1); -}); - -rl.on('line', (line) => { - (async () => { - try { - const payload = JSON.parse(line); - const wikitext = payload.wikitext || ''; - const lang = payload.lang || null; - - // parse wikitext into a document (sync) - const doc = lang ? wtf(wikitext, lang) : wtf(wikitext); - const text = (doc && typeof doc.text === 'function') ? doc.text() : ''; - const isRedirect = (doc && typeof doc.isRedirect === 'function') ? doc.isRedirect() : false; - - process.stdout.write(JSON.stringify({ text, isRedirect }) + '\n'); - } catch (err) { - process.stdout.write(JSON.stringify({ error: String(err && err.stack || err) }) + '\n'); - } - })(); -}); \ No newline at end of file diff --git a/data/wikibooks/wikibooks.md b/data/wikibooks/wikibooks.md index 400c16d2f1ae5bafddde20b0653a003f46bd46cb..0ec56bb04e87cce1f20400e454c82062f1c95f2d 100644 --- a/data/wikibooks/wikibooks.md +++ b/data/wikibooks/wikibooks.md @@ -1,107 +1,56 @@ --- pretty_name: Wikibooks language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Books + - language-modeling --- - # Dataset Card for Wikibooks - - -The Danish Subsection of [Wikibooks](https://www.wikibooks.org). 
- - - ## Dataset Description - - - -- **Number of samples**: 1.73K -- **Number of tokens (Llama 3)**: 7.63M -- **Average document length in tokens (min, max)**: 4.40K (8, 368.88K) - - - - -## Dataset Structure +- **Number of records:** 1319 +- **Languages:** Danish +## Dataset Structure An example from the dataset looks as follows. - - - -```py +```yaml { - "id": "wikibooks_7", - "text": "Boghylde:Hobbies\nHobbies er alle de aktiviteter vi giver os ud i, uden at forvente et afkast ud over[...]", - "source": "wikibooks", - "added": "2025-08-18", - "created": "2006-06-20, 2006-06-20", - "token_count": 95 + 'text': 'Spilinfo. +Spillet er lavet af Blizzard Entertainme', + 'source': 'wikibooks', + 'id': 'wikibooks_1125', + 'added': '2021-03-28', + 'created': '2019-01-01, 2021-01-01', + 'metadata': { + 'domain': 'Wiki & Books', + 'license': 'Creative Commons Legal Code + +CC0 1.0 Universal', + 'source-pretty': 'Wikibooks' + } } ``` -### Data Fields - -An entry in the dataset consists of the following fields: +## Data Fields -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer. - +- **id**: source-specific identifier. +- **text**: textual content of the document. +- **source**: source of the data. +- **added**: timestamp when ai2 acquired this data. +- **created**: timestamp when the original document was created (best guess if not available). +- **metadata**: source-specific metadata. +## License Information +
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code -### Dataset Statistics - - -

- +CC0 1.0 Universal

- - - -### Processing - -For this dataset, we pulled the latest [database dump from Wikimedia](https://dumps.wikimedia.org/dawikibooks/latest/) and extracted the texts using the [wtf_wikipedia](https://github.com/spencermountain/wtf_wikipedia/tree/dev) parser. - -Because the parser is written in JavaScript, you need to have Node.js installed on your machine. - -To run the `create.py` file, you first need to install the parser's dependencies (a fuller invocation sketch follows after this section): - -```bash -$ cd parser/ && npm install && cd .. -``` - -We chose `wtf_wikipedia` because it was empirically the best of the parsers we tested (`mwparserfromhell`, `mediawiki_dump`, `wikiextractor`, and `wtf_wikipedia`); the others still produced artifacts from the parsing of wikicode. - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish Gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bibtex -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -```
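For context, a full rebuild of this dataset could then look roughly as follows. This is a minimal sketch, not part of the original documentation: it assumes you are inside `data/wikibooks/` and that [uv](https://docs.astral.sh/uv/) is available, since `create.py` declares its Python dependencies inline in a PEP 723 `# /// script` header.

```bash
# Sketch only: the working directory and the uv invocation are assumptions.
cd parser && npm install && cd ..  # install the Node.js dependencies for the wtf_wikipedia bridge
uv run create.py                   # uv resolves the inline script dependencies, then writes the parquet file
```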
diff --git a/data/wikibooks/wikibooks.parquet b/data/wikibooks/wikibooks.parquet index 801a371e16b7b8c43024abaca7651d94e13bafab..9e8a65c8a9ac584663602c5fa50bad1ef80c353d 100644 --- a/data/wikibooks/wikibooks.parquet +++ b/data/wikibooks/wikibooks.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:143ce3ed06be9e576d9e0a35eb674595bb835cee162e84884d3c9f45c34dab40 -size 13481948 +oid sha256:5984554e9c048e06cd156903a345178dc18a95572e3b12fb4c6e6266bcc87fa5 +size 11282733 diff --git a/data/wikipedia/create.py b/data/wikipedia/create.py deleted file mode 100644 index 9fca0254d326099d6f861019a4fd630f9df00e72..0000000000000000000000000000000000000000 --- a/data/wikipedia/create.py +++ /dev/null @@ -1,256 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets", -# "mediawiki-dump", -# "mwparserfromhell", -# "pandas", -# "requests", -# "tqdm", -# "transformers", -# "dynaword" -# ] -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "00e7f2aee7f7ad2da423419f77ecbb9c0536de0d" } -# /// -import bz2 -import datetime -import json -import logging -import os -from pathlib import Path -import re -import subprocess -import sys -import threading - -from mediawiki_dump.dumps import IteratorDump -from mediawiki_dump.reader import DumpReaderArticles - -from mwparserfromhell import parse - -import requests -from tqdm import tqdm -import pandas as pd - -from datasets import Dataset -from typing import Tuple - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) -source = "wiki" - - -class WtfNodeBridge: - """ - Persistent Node bridge to wtf_wikipedia. - Call .parse(wikitext, lang=None) -> (text, is_redirect) - Remember to call .close() when done. 
- """ - - def __init__( - self, node_script_path: str = "parser/wtf_bridge.js", node_cmd: str = "node" - ): - self.proc = subprocess.Popen( - [node_cmd, node_script_path], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - encoding="utf-8", - bufsize=1, - ) - self._lock = threading.Lock() - - # background thread to log stderr (helpful for debugging) - def _drain_stderr(p): - try: - for line in p.stderr: - logger.warning("wtf_node stderr: %s", line.rstrip()) - except Exception: - pass - - t = threading.Thread(target=_drain_stderr, args=(self.proc,), daemon=True) - t.start() - - def parse(self, wikitext: str, lang: str | None = None) -> Tuple[str, bool]: - if self.proc.poll() is not None: - raise RuntimeError("Node bridge process has exited") - payload = {"wikitext": wikitext} - if lang: - payload["lang"] = lang - line = json.dumps(payload, ensure_ascii=False) - with self._lock: - # write and flush a single JSON line - try: - self.proc.stdin.write(line + "\n") - self.proc.stdin.flush() - except BrokenPipeError as e: - raise RuntimeError("Broken pipe writing to node bridge") from e - - # read exactly one JSON line back - out_line = self.proc.stdout.readline() - if not out_line: - raise RuntimeError("No response from node bridge (it may have exited)") - res = json.loads(out_line) - if res.get("error"): - # choose to either raise or return empty text; here we raise - raise RuntimeError("Node bridge error: " + res["error"]) - return res.get("text", ""), bool(res.get("isRedirect", False)) - - def close(self): - try: - if self.proc.stdin: - self.proc.stdin.close() - except Exception: - pass - try: - self.proc.terminate() - self.proc.wait(timeout=3) - except Exception: - try: - self.proc.kill() - except Exception: - pass - - -def download_wiki_dump(url: str, file_path: str): - """ - Downloads a file from a URL with a progress bar. - - Args: - url (str): The URL of the file to download. - file_path (str): The local path to save the file. 
- """ - print(f"Downloading {url} to {file_path}...") - try: - with requests.get(url, stream=True) as r: - r.raise_for_status() - total_size = int(r.headers.get("content-length", 0)) - block_size = 8192 # 8 Kibibytes - - with ( - open(file_path, "wb") as f, - tqdm( - total=total_size, unit="B", unit_scale=True, desc="Downloading" - ) as pbar, - ): - for chunk in r.iter_content(chunk_size=block_size): - if chunk: # filter out keep-alive chunks - f.write(chunk) - pbar.update(len(chunk)) - logger.info("\nDownload complete.") - except requests.exceptions.RequestException as e: - logger.error(f"\nError downloading file: {e}") - sys.exit(1) - - -def get_content(file_name: str): - with bz2.open(file_name, mode="r") as fp: - yield from fp - - -def process_dump_to_parquet(bz2_file_path: str, parquet_file_path: str): - dump = IteratorDump(iterator=get_content(file_name=bz2_file_path)) - pages = DumpReaderArticles().read(dump) - articles = [] - today = datetime.datetime.now().strftime("%Y-%m-%d") - - bridge = WtfNodeBridge("parser/wtf_bridge.js") - try: - for page in tqdm(pages): - try: - plain_text, is_redirect = bridge.parse(page.content, lang="da") - except Exception as exc: - logger.warning( - "wtf parse failed for page %s: %s -- falling back to mwparserfromhell", - getattr(page, "title", ""), - exc, - ) - plain_text = parse(page.content).strip_code().strip() - is_redirect = plain_text.startswith("REDIRECT") - - if is_redirect: - continue - - # Additional cleanup if you like (wtf.text() already removes a lot) - plain_text = re.sub( - r"thumb(?:\|(?:left|right|center|\d+px)*)*\|[^\n]*", "", plain_text - ).strip() - - if len(plain_text) == 0: - logger.warning("Skipping empty article") - continue - - date = datetime.datetime.strptime( - page.timestamp, "%Y-%m-%dT%H:%M:%SZ" - ).strftime("%Y-%m-%d") - - articles.append( - { - "id": f"{source}_{page.page_id}", - "source": source, - "created": f"{date}, {date}", - "text": f"{page.title}\n{plain_text}", - "added": today, - } - ) - finally: - bridge.close() - - df = pd.DataFrame(articles) - ds = Dataset.from_pandas(df) - ds = add_token_count(ds) - ds = remove_empty_texts(ds) - ds = remove_duplicate_text(ds) - ds = ensure_column_order(ds) - ds.to_parquet(parquet_file_path, compression="snappy") - - -def main(): - """ - Main function to orchestrate the download and processing. - """ - # --- Configuration --- - # URL for the latest Danish Wikipedia articles dump - WIKI_DUMP_URL = f"https://dumps.wikimedia.org/da{source}/latest/da{source}-latest-pages-articles.xml.bz2" - - # Local file paths - DOWNLOADED_BZ2_FILE = f"tmp/da{source}-latest-pages-articles.xml.bz2" - OUTPUT_PARQUET_FILE = "wikipedia.parquet" - - # --- Execution --- - # 1. Download the dump file - if not os.path.exists(DOWNLOADED_BZ2_FILE): - download_wiki_dump(WIKI_DUMP_URL, DOWNLOADED_BZ2_FILE) - else: - logger.info(f"File '{DOWNLOADED_BZ2_FILE}' already exists. Skipping download.") - - # 2. Process the dump and save to Parquet - process_dump_to_parquet(DOWNLOADED_BZ2_FILE, OUTPUT_PARQUET_FILE) - - # 3. (Optional) Clean up the downloaded file - # If you want to keep the bz2 file, comment out the next line. 
- logger.info(f"Cleaning up by removing '{DOWNLOADED_BZ2_FILE}'...") - os.remove(DOWNLOADED_BZ2_FILE) - - logger.info("\nScript finished successfully.") - - -if __name__ == "__main__": - log_path = Path(__file__).parent / f"{source}.log" - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(), - logging.FileHandler(log_path), - ], - ) - main() diff --git a/data/wikipedia/descriptive_stats.json b/data/wikipedia/descriptive_stats.json deleted file mode 100644 index 8f2a8bccb55b04740f7b636f74df6caa4fd5b0fd..0000000000000000000000000000000000000000 --- a/data/wikipedia/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 309641, - "number_of_tokens": 173334040, - "min_length_tokens": 8, - "max_length_tokens": 107351, - "number_of_characters": 518132934, - "min_length_characters": 13, - "max_length_characters": 321469 -} \ No newline at end of file diff --git a/data/wikipedia/images/dist_document_length.png b/data/wikipedia/images/dist_document_length.png deleted file mode 100644 index 3b553484256a9127a1cc0091251eee966d96b448..0000000000000000000000000000000000000000 --- a/data/wikipedia/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fed5865081bf0a4c5c3b0825afd6b8d07c9a93c19c9282f67ac180a1e6ecdead -size 568673 diff --git a/data/wikipedia/parser/package-lock.json b/data/wikipedia/parser/package-lock.json deleted file mode 100644 index 4c81f6284bb18cb308dcf1b823670db5e6ab9766..0000000000000000000000000000000000000000 --- a/data/wikipedia/parser/package-lock.json +++ /dev/null @@ -1,724 +0,0 @@ -{ - "name": "parser", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "parser", - "version": "1.0.0", - "license": "ISC", - "dependencies": { - "wtf_wikipedia": "^10.4.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@types/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", - "license": "MIT" - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", - "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", - "license": "MIT" - }, - "node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": 
"sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/camelcase-keys": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-7.0.2.tgz", - "integrity": "sha512-Rjs1H+A9R+Ig+4E/9oyB66UC5Mj9Xq3N//vcLf2WzgdTi/3gUu3Z9KoqmlrEG4VuuLK8wJHofxzdQXz/knhiYg==", - "license": "MIT", - "dependencies": { - "camelcase": "^6.3.0", - "map-obj": "^4.1.0", - "quick-lru": "^5.1.1", - "type-fest": "^1.2.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-5.0.1.tgz", - "integrity": "sha512-VfxadyCECXgQlkoEAjeghAr5gY3Hf+IKjKb+X8tGVDtveCjN+USwprd2q3QXBR9T1+x2DG0XZF5/w+7HAtSaXA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", - "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", - "license": "MIT", - "dependencies": { - "decamelize": "^1.1.0", - "map-obj": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize-keys/node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decamelize-keys/node_modules/map-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/find-up/node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": 
"sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hard-rejection": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", - "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/indent-string": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", - "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "license": "MIT" - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/isomorphic-unfetch": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz", - "integrity": "sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==", - "license": "MIT", - "dependencies": { - "node-fetch": "^2.6.1", - "unfetch": "^4.2.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": 
"sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "license": "MIT" - }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "license": "MIT" - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "license": "MIT", - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/map-obj": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", - "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/meow": { - "version": "10.1.5", - "resolved": "https://registry.npmjs.org/meow/-/meow-10.1.5.tgz", - "integrity": "sha512-/d+PQ4GKmGvM9Bee/DPa8z3mXs/pkvJE2KEThngVNOqtmljC6K7NMPxtc2JeZYTmpWb9k/TmxjeL18ez3h7vCw==", - "license": "MIT", - "dependencies": { - "@types/minimist": "^1.2.2", - "camelcase-keys": "^7.0.0", - "decamelize": "^5.0.0", - "decamelize-keys": "^1.1.0", - "hard-rejection": "^2.1.0", - "minimist-options": "4.1.0", - "normalize-package-data": "^3.0.2", - "read-pkg-up": "^8.0.0", - "redent": "^4.0.0", - "trim-newlines": "^4.0.2", - "type-fest": "^1.2.2", - "yargs-parser": "^20.2.9" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/min-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", - "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/minimist-options": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", - "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", - "license": "MIT", - 
"dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0", - "kind-of": "^6.0.3" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-exists": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", - "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/path-exists-cli": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-exists-cli/-/path-exists-cli-2.0.0.tgz", - "integrity": "sha512-qGr0A87KYCznmvabblxyxnzA/MtPZ28wH+4SCMP4tjTFAbzqwvs5xpUZExAYzq5OgHe5vIswzdH5iosCb8YF/Q==", - "license": "MIT", - "dependencies": { - "meow": "^10.1.1", - "path-exists": "^5.0.0" - }, - "bin": { - "path-exists": "cli.js" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": 
"sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "license": "ISC" - }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-6.0.0.tgz", - "integrity": "sha512-X1Fu3dPuk/8ZLsMhEj5f4wFAF0DWoK7qhGJvgaijocXxBmSToKfbFtqbxMO7bVjNA1dmE5huAzjXj/ey86iw9Q==", - "license": "MIT", - "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^3.0.2", - "parse-json": "^5.2.0", - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-8.0.0.tgz", - "integrity": "sha512-snVCqPczksT0HS2EC+SxUndvSzn6LRCwpfSvLrIfR5BKDQQZMaI6jPRC9dYvYFDRAuFEAnkwww8kBBNE/3VvzQ==", - "license": "MIT", - "dependencies": { - "find-up": "^5.0.0", - "read-pkg": "^6.0.0", - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/redent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-4.0.0.tgz", - "integrity": "sha512-tYkDkVVtYkSVhuQ4zBgfvciymHaeuel+zFKXShfDnFP5SyVEP7qo70Rf1jTOTCx3vGNAbnEi/xFkcfQVMIBWag==", - "license": "MIT", - "dependencies": { - "indent-string": "^5.0.0", - "strip-indent": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.22", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", - "integrity": 
"sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", - "license": "CC0-1.0" - }, - "node_modules/strip-indent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", - "integrity": "sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", - "license": "MIT", - "dependencies": { - "min-indent": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "license": "MIT" - }, - "node_modules/trim-newlines": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-4.1.1.tgz", - "integrity": "sha512-jRKj0n0jXWo6kh62nA5TEh3+4igKDXLvzBJcPpiizP7oOolUrYIxmVBG9TOtHYFHoddUk6YvAkGeGoSVTXfQXQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/unfetch": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/unfetch/-/unfetch-4.2.0.tgz", - "integrity": "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==", - "license": "MIT" - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "license": "BSD-2-Clause" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/wtf_wikipedia": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/wtf_wikipedia/-/wtf_wikipedia-10.4.0.tgz", - "integrity": "sha512-yRxTiBURj2LW5HWAe+T7bCV2x45C/qTqcknUTmInKmB9cmLSxR6Nh44rB9K+nfNiydtjc3HLHwYWxMuHZtpVSQ==", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "isomorphic-unfetch": "^3.1.0", - "path-exists-cli": "2.0.0" - }, - "bin": { - "wtf_wikipedia": "cli.js" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, - "node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - } - } -} \ No newline at end of file diff --git a/data/wikipedia/parser/package.json b/data/wikipedia/parser/package.json deleted file mode 100644 index 090b8bdfa1172f7f314293c07f0b90c409788e41..0000000000000000000000000000000000000000 --- a/data/wikipedia/parser/package.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "parser", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "ISC", - "type": "commonjs", - "dependencies": { - "wtf_wikipedia": "^10.4.0" - } -} diff --git a/data/wikipedia/parser/wtf_bridge.js b/data/wikipedia/parser/wtf_bridge.js deleted file mode 100644 index a6630a92615d6098dcd85af8a7e9bcb0e790362d..0000000000000000000000000000000000000000 --- a/data/wikipedia/parser/wtf_bridge.js +++ /dev/null @@ -1,39 +0,0 @@ -// wtf_bridge.js -// Usage: node wtf_bridge.js -// Reads newline-delimited JSON from stdin: {"wikitext":"...","lang":"da"} -// Writes newline-delimited JSON to stdout: {"text":"...","isRedirect":false} or {"error":"..."} -const wtf = require('wtf_wikipedia'); -const readline = require('readline'); - -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout, - terminal: false -}); - -process.on('uncaughtException', (err) => { - // emit as JSON so Python can see it - try { - process.stdout.write(JSON.stringify({ error: String(err && err.stack || err) }) + '\n'); - } catch (e) {} - process.exit(1); -}); - -rl.on('line', (line) => { - (async () => { - try { - const payload = JSON.parse(line); - const wikitext = payload.wikitext || ''; - const lang = payload.lang || null; - - // parse wikitext into a document (sync) - const doc = lang ? wtf(wikitext, lang) : wtf(wikitext); - const text = (doc && typeof doc.text === 'function') ? doc.text() : ''; - const isRedirect = (doc && typeof doc.isRedirect === 'function') ? doc.isRedirect() : false; - - process.stdout.write(JSON.stringify({ text, isRedirect }) + '\n'); - } catch (err) { - process.stdout.write(JSON.stringify({ error: String(err && err.stack || err) }) + '\n'); - } - })(); -}); \ No newline at end of file diff --git a/data/wikipedia/wiki.log b/data/wikipedia/wiki.log deleted file mode 100644 index 970b844aec6cc9614536818c194c93f4597f7d95..0000000000000000000000000000000000000000 --- a/data/wikipedia/wiki.log +++ /dev/null @@ -1,5 +0,0 @@ -2025-08-18 19:13:08,960 - INFO - Parsing XML dump... 
-2025-08-18 19:13:09,118 - WARNING - Skipping empty article -2025-08-18 19:13:11,361 - WARNING - Skipping empty article -2025-08-18 19:13:12,764 - WARNING - Skipping empty article -2025-08-18 19:13:13,945 - WARNING - Skipping empty article diff --git a/data/wikipedia/wikipedia.md b/data/wikipedia/wikipedia.md deleted file mode 100644 index 0b07c5a5ca3cb9b057d9e2f9ffc6d8f69242cf06..0000000000000000000000000000000000000000 --- a/data/wikipedia/wikipedia.md +++ /dev/null @@ -1,110 +0,0 @@ ---- -pretty_name: Wikipedia -language: -- da -license: cc0-1.0 -license_name: CC-0 -size_categories: -- 100k-1M -task_categories: -- text-generation -- fill-mask -task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Encyclopedic ---- - -# Dataset Card for Wikipedia - - -The Danish subsection of [Wikipedia](https://en.wikipedia.org/wiki/Main_Page). - - - -You can read more about Wikipedia on its [about](https://en.wikipedia.org/wiki/Wikipedia:About) page. - -## Dataset Description - - - -- **Number of samples**: 309.64K -- **Number of tokens (Llama 3)**: 173.33M -- **Average document length in tokens (min, max)**: 559.79 (8, 107.35K) - - - - -## Dataset Structure -An example from the dataset looks as follows. - - - -```py -{ - "id": "wiki_2", - "text": "Arkæologi\nArkæologi er studiet af tidligere tiders menneskelige aktivitet, primært gennem studiet af[...]", - "source": "wiki", - "added": "2025-08-18", - "created": "2024-06-16, 2024-06-16", - "token_count": 3380 -} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): A unique identifier for each document. -- `text` (`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): A date for when the document was added to this collection. -- `created` (`str`): A date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer. - - -### Dataset Statistics - - -


- - - -### Processing - -For this dataset, we pulled the latest [database dump from Wikimedia](https://dumps.wikimedia.org/dawiki/latest/) and extracted the texts using the [wtf_wikipedia](https://github.com/spencermountain/wtf_wikipedia/tree/dev) parser. - - - -Because the parser is written in JavaScript, you need to have Node.js installed on your machine. - -To run the `create.py` file, you first need to install the parser's dependencies: - -```bash -$ cd parser/ && npm install && cd .. -``` - -We chose `wtf_wikipedia` because it was empirically the best of the parsers we tested (`mwparserfromhell`, `mediawiki_dump`, `wikiextractor`, and `wtf_wikipedia`); the others still produced artifacts from the parsing of wikicode. - -## Additional Information - - -### Citation Information - -This dataset was initially published as part of the [Danish Gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset: - -> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021). - -```bibtex -@inproceedings{dagw, - title = {{The Danish Gigaword Corpus}}, - author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab}, - year = 2021, - booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics}, - publisher = {NEALT} -} -``` diff --git a/data/wikipedia/wikipedia.parquet b/data/wikipedia/wikipedia.parquet deleted file mode 100644 index 73f73ba045f71f0d4d66783981651c40b0e38275..0000000000000000000000000000000000000000 --- a/data/wikipedia/wikipedia.parquet +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d0a8f27f66a761017e155b7820366227cd75566379aa755611f53e9299ed8717 -size 318870452 diff --git a/data/wikisource/create.py b/data/wikisource/create.py deleted file mode 100644 index 45144ba911bd7f8fe4c6f0f6ebb4b7ac7fe7df6e..0000000000000000000000000000000000000000 --- a/data/wikisource/create.py +++ /dev/null @@ -1,251 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# dependencies = [ -# "datasets", -# "mediawiki-dump", -# "mwparserfromhell", -# "pandas", -# "requests", -# "tqdm", -# "transformers", -# "dynaword" -# ] -# [tool.uv.sources] -# dynaword = { git = "https://huggingface.co/datasets/danish-foundation-models/danish-dynaword", rev = "00e7f2aee7f7ad2da423419f77ecbb9c0536de0d" } -# /// -import bz2 -import datetime -import json -import logging -import os -import re -import subprocess -import sys -import threading - -from mediawiki_dump.dumps import IteratorDump -from mediawiki_dump.reader import DumpReaderArticles - -from mwparserfromhell import parse - -import requests -from tqdm import tqdm -import pandas as pd - -from datasets import Dataset -from typing import Tuple - -from dynaword.process_dataset import ( - add_token_count, - ensure_column_order, - remove_duplicate_text, - remove_empty_texts, -) - -logger = logging.getLogger(__name__) -logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" -) -source = "wikisource" - - -class WtfNodeBridge: - """ - Persistent Node bridge to wtf_wikipedia.
- Call .parse(wikitext, lang=None) -> (text, is_redirect) - Remember to call .close() when done. - """ - - def __init__( - self, node_script_path: str = "parser/wtf_bridge.js", node_cmd: str = "node" - ): - self.proc = subprocess.Popen( - [node_cmd, node_script_path], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - encoding="utf-8", - bufsize=1, - ) - self._lock = threading.Lock() - - # background thread to log stderr (helpful for debugging) - def _drain_stderr(p): - try: - for line in p.stderr: - logger.warning("wtf_node stderr: %s", line.rstrip()) - except Exception: - pass - - t = threading.Thread(target=_drain_stderr, args=(self.proc,), daemon=True) - t.start() - - def parse(self, wikitext: str, lang: str | None = None) -> Tuple[str, bool]: - if self.proc.poll() is not None: - raise RuntimeError("Node bridge process has exited") - payload = {"wikitext": wikitext} - if lang: - payload["lang"] = lang - line = json.dumps(payload, ensure_ascii=False) - with self._lock: - # write and flush a single JSON line - try: - self.proc.stdin.write(line + "\n") - self.proc.stdin.flush() - except BrokenPipeError as e: - raise RuntimeError("Broken pipe writing to node bridge") from e - - # read exactly one JSON line back - out_line = self.proc.stdout.readline() - if not out_line: - raise RuntimeError("No response from node bridge (it may have exited)") - res = json.loads(out_line) - if res.get("error"): - # choose to either raise or return empty text; here we raise - raise RuntimeError("Node bridge error: " + res["error"]) - return res.get("text", ""), bool(res.get("isRedirect", False)) - - def close(self): - try: - if self.proc.stdin: - self.proc.stdin.close() - except Exception: - pass - try: - self.proc.terminate() - self.proc.wait(timeout=3) - except Exception: - try: - self.proc.kill() - except Exception: - pass - - -def download_wiki_dump(url: str, file_path: str): - """ - Downloads a file from a URL with a progress bar. - - Args: - url (str): The URL of the file to download. - file_path (str): The local path to save the file. 
- """ - print(f"Downloading {url} to {file_path}...") - try: - with requests.get(url, stream=True) as r: - r.raise_for_status() - total_size = int(r.headers.get("content-length", 0)) - block_size = 8192 # 8 Kibibytes - - with ( - open(file_path, "wb") as f, - tqdm( - total=total_size, unit="B", unit_scale=True, desc="Downloading" - ) as pbar, - ): - for chunk in r.iter_content(chunk_size=block_size): - if chunk: # filter out keep-alive chunks - f.write(chunk) - pbar.update(len(chunk)) - logger.info("\nDownload complete.") - except requests.exceptions.RequestException as e: - logger.error(f"\nError downloading file: {e}") - sys.exit(1) - - -def get_content(file_name: str): - with bz2.open(file_name, mode="r") as fp: - yield from fp - - -def process_dump_to_parquet(bz2_file_path: str, parquet_file_path: str): - dump = IteratorDump(iterator=get_content(file_name=bz2_file_path)) - pages = DumpReaderArticles().read(dump) - articles = [] - today = datetime.datetime.now().strftime("%Y-%m-%d") - - bridge = WtfNodeBridge("parser/wtf_bridge.js") - try: - for page in tqdm(pages): - try: - plain_text, is_redirect = bridge.parse(page.content, lang="da") - except Exception as exc: - logger.warning( - "wtf parse failed for page %s: %s -- falling back to mwparserfromhell", - getattr(page, "title", ""), - exc, - ) - plain_text = parse(page.content).strip_code().strip() - is_redirect = plain_text.startswith("REDIRECT") - - if is_redirect: - continue - - # Additional cleanup if you like (wtf.text() already removes a lot) - plain_text = re.sub( - r"thumb(?:\|(?:left|right|center|\d+px)*)*\|[^\n]*", "", plain_text - ).strip() - - if len(plain_text) == 0: - logger.warning("Skipping empty article") - continue - - date = datetime.datetime.strptime( - page.timestamp, "%Y-%m-%dT%H:%M:%SZ" - ).strftime("%Y-%m-%d") - - articles.append( - { - "id": f"{source}_{page.page_id}", - "source": source, - "created": f"{date}, {date}", - "text": f"{page.title}\n{plain_text}", - "added": today, - } - ) - finally: - bridge.close() - - df = pd.DataFrame(articles) - ds = Dataset.from_pandas(df) - ds = add_token_count(ds) - ds = remove_empty_texts(ds) - ds = remove_duplicate_text(ds) - ds = ensure_column_order(ds) - ds.to_parquet(parquet_file_path, compression="snappy") - - -def main(): - """ - Main function to orchestrate the download and processing. - """ - # --- Configuration --- - # URL for the latest Danish Wikipedia articles dump - WIKI_DUMP_URL = f"https://dumps.wikimedia.org/da{source}/latest/da{source}-latest-pages-articles.xml.bz2" - - # Local file paths - DOWNLOADED_BZ2_FILE = f"tmp/da{source}-latest-pages-articles.xml.bz2" - OUTPUT_PARQUET_FILE = f"{source}.parquet" - - # --- Execution --- - # 1. Download the dump file - if not os.path.exists(DOWNLOADED_BZ2_FILE): - download_wiki_dump(WIKI_DUMP_URL, DOWNLOADED_BZ2_FILE) - else: - print(f"File '{DOWNLOADED_BZ2_FILE}' already exists. Skipping download.") - - # 2. Process the dump and save to Parquet - process_dump_to_parquet(DOWNLOADED_BZ2_FILE, OUTPUT_PARQUET_FILE) - - # 3. (Optional) Clean up the downloaded file - # If you want to keep the bz2 file, comment out the next line. 
- print(f"Cleaning up by removing '{DOWNLOADED_BZ2_FILE}'...") - os.remove(DOWNLOADED_BZ2_FILE) - - print("\nScript finished successfully.") - - -if __name__ == "__main__": - # Before running, make sure you have the required libraries installed: - # pip install requests mwparserfromhell pandas pyarrow - main() diff --git a/data/wikisource/descriptive_stats.json b/data/wikisource/descriptive_stats.json deleted file mode 100644 index bb50685d0743387b7cacb1b8e07b261c7fda9f12..0000000000000000000000000000000000000000 --- a/data/wikisource/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 3004, - "number_of_tokens": 6275973, - "min_length_tokens": 17, - "max_length_tokens": 261099, - "number_of_characters": 18286474, - "min_length_characters": 54, - "max_length_characters": 754270 -} \ No newline at end of file diff --git a/data/wikisource/images/dist_document_length.png b/data/wikisource/images/dist_document_length.png deleted file mode 100644 index 3965a3fee401af09f8d30ec0c735ed12ed48d3c4..0000000000000000000000000000000000000000 --- a/data/wikisource/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a56b61df3f6093ec6caa098b61913854643bd2bbc976e9633b441ceb059e18cf -size 544083 diff --git a/data/wikisource/parser/package-lock.json b/data/wikisource/parser/package-lock.json deleted file mode 100644 index 9969047897d97e95f057e2fa9c80647e6196718c..0000000000000000000000000000000000000000 --- a/data/wikisource/parser/package-lock.json +++ /dev/null @@ -1,724 +0,0 @@ -{ - "name": "parser", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "parser", - "version": "1.0.0", - "license": "ISC", - "dependencies": { - "wtf_wikipedia": "^10.4.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@types/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", - "license": "MIT" - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", - "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", - "license": "MIT" - }, - "node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/camelcase-keys": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-7.0.2.tgz", - "integrity": "sha512-Rjs1H+A9R+Ig+4E/9oyB66UC5Mj9Xq3N//vcLf2WzgdTi/3gUu3Z9KoqmlrEG4VuuLK8wJHofxzdQXz/knhiYg==", - "license": "MIT", - "dependencies": { - "camelcase": "^6.3.0", - "map-obj": "^4.1.0", - "quick-lru": "^5.1.1", - "type-fest": "^1.2.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-5.0.1.tgz", - "integrity": "sha512-VfxadyCECXgQlkoEAjeghAr5gY3Hf+IKjKb+X8tGVDtveCjN+USwprd2q3QXBR9T1+x2DG0XZF5/w+7HAtSaXA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", - "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", - "license": "MIT", - "dependencies": { - "decamelize": "^1.1.0", - "map-obj": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decamelize-keys/node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decamelize-keys/node_modules/map-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/find-up/node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - 
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hard-rejection": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", - "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/indent-string": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", - "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "license": "MIT" - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/isomorphic-unfetch": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz", - "integrity": "sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==", - "license": "MIT", - "dependencies": { - "node-fetch": "^2.6.1", - "unfetch": "^4.2.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - 
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "license": "MIT" - }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "license": "MIT" - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "license": "MIT", - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/map-obj": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", - "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/meow": { - "version": "10.1.5", - "resolved": "https://registry.npmjs.org/meow/-/meow-10.1.5.tgz", - "integrity": "sha512-/d+PQ4GKmGvM9Bee/DPa8z3mXs/pkvJE2KEThngVNOqtmljC6K7NMPxtc2JeZYTmpWb9k/TmxjeL18ez3h7vCw==", - "license": "MIT", - "dependencies": { - "@types/minimist": "^1.2.2", - "camelcase-keys": "^7.0.0", - "decamelize": "^5.0.0", - "decamelize-keys": "^1.1.0", - "hard-rejection": "^2.1.0", - "minimist-options": "4.1.0", - "normalize-package-data": "^3.0.2", - "read-pkg-up": "^8.0.0", - "redent": "^4.0.0", - "trim-newlines": "^4.0.2", - "type-fest": "^1.2.2", - "yargs-parser": "^20.2.9" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/min-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", - "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/minimist-options": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", - "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", - "license": "MIT", - "dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0", - "kind-of": "^6.0.3" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": 
"sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-exists": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", - "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/path-exists-cli": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-exists-cli/-/path-exists-cli-2.0.0.tgz", - "integrity": "sha512-qGr0A87KYCznmvabblxyxnzA/MtPZ28wH+4SCMP4tjTFAbzqwvs5xpUZExAYzq5OgHe5vIswzdH5iosCb8YF/Q==", - "license": "MIT", - "dependencies": { - "meow": "^10.1.1", - "path-exists": "^5.0.0" - }, - "bin": { - "path-exists": "cli.js" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "license": "ISC" - }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": 
"sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-6.0.0.tgz", - "integrity": "sha512-X1Fu3dPuk/8ZLsMhEj5f4wFAF0DWoK7qhGJvgaijocXxBmSToKfbFtqbxMO7bVjNA1dmE5huAzjXj/ey86iw9Q==", - "license": "MIT", - "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^3.0.2", - "parse-json": "^5.2.0", - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-8.0.0.tgz", - "integrity": "sha512-snVCqPczksT0HS2EC+SxUndvSzn6LRCwpfSvLrIfR5BKDQQZMaI6jPRC9dYvYFDRAuFEAnkwww8kBBNE/3VvzQ==", - "license": "MIT", - "dependencies": { - "find-up": "^5.0.0", - "read-pkg": "^6.0.0", - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/redent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-4.0.0.tgz", - "integrity": "sha512-tYkDkVVtYkSVhuQ4zBgfvciymHaeuel+zFKXShfDnFP5SyVEP7qo70Rf1jTOTCx3vGNAbnEi/xFkcfQVMIBWag==", - "license": "MIT", - "dependencies": { - "indent-string": "^5.0.0", - "strip-indent": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.22", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", - "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", - "license": "CC0-1.0" - }, - "node_modules/strip-indent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", - "integrity": 
"sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", - "license": "MIT", - "dependencies": { - "min-indent": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "license": "MIT" - }, - "node_modules/trim-newlines": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-4.1.1.tgz", - "integrity": "sha512-jRKj0n0jXWo6kh62nA5TEh3+4igKDXLvzBJcPpiizP7oOolUrYIxmVBG9TOtHYFHoddUk6YvAkGeGoSVTXfQXQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/unfetch": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/unfetch/-/unfetch-4.2.0.tgz", - "integrity": "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==", - "license": "MIT" - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "license": "BSD-2-Clause" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/wtf_wikipedia": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/wtf_wikipedia/-/wtf_wikipedia-10.4.0.tgz", - "integrity": "sha512-yRxTiBURj2LW5HWAe+T7bCV2x45C/qTqcknUTmInKmB9cmLSxR6Nh44rB9K+nfNiydtjc3HLHwYWxMuHZtpVSQ==", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "isomorphic-unfetch": "^3.1.0", - "path-exists-cli": "2.0.0" - }, - "bin": { - "wtf_wikipedia": "cli.js" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, - "node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": 
"sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - } - } -} diff --git a/data/wikisource/parser/package.json b/data/wikisource/parser/package.json deleted file mode 100644 index 090b8bdfa1172f7f314293c07f0b90c409788e41..0000000000000000000000000000000000000000 --- a/data/wikisource/parser/package.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "parser", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "ISC", - "type": "commonjs", - "dependencies": { - "wtf_wikipedia": "^10.4.0" - } -} diff --git a/data/wikisource/parser/wtf_bridge.js b/data/wikisource/parser/wtf_bridge.js deleted file mode 100644 index a6630a92615d6098dcd85af8a7e9bcb0e790362d..0000000000000000000000000000000000000000 --- a/data/wikisource/parser/wtf_bridge.js +++ /dev/null @@ -1,39 +0,0 @@ -// wtf_bridge.js -// Usage: node wtf_bridge.js -// Reads newline-delimited JSON from stdin: {"wikitext":"...","lang":"da"} -// Writes newline-delimited JSON to stdout: {"text":"...","isRedirect":false} or {"error":"..."} -const wtf = require('wtf_wikipedia'); -const readline = require('readline'); - -const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout, - terminal: false -}); - -process.on('uncaughtException', (err) => { - // emit as JSON so Python can see it - try { - process.stdout.write(JSON.stringify({ error: String(err && err.stack || err) }) + '\n'); - } catch (e) {} - process.exit(1); -}); - -rl.on('line', (line) => { - (async () => { - try { - const payload = JSON.parse(line); - const wikitext = payload.wikitext || ''; - const lang = payload.lang || null; - - // parse wikitext into a document (sync) - const doc = lang ? wtf(wikitext, lang) : wtf(wikitext); - const text = (doc && typeof doc.text === 'function') ? doc.text() : ''; - const isRedirect = (doc && typeof doc.isRedirect === 'function') ? doc.isRedirect() : false; - - process.stdout.write(JSON.stringify({ text, isRedirect }) + '\n'); - } catch (err) { - process.stdout.write(JSON.stringify({ error: String(err && err.stack || err) }) + '\n'); - } - })(); -}); \ No newline at end of file diff --git a/data/wikisource/wikisource.md b/data/wikisource/wikisource.md index 46f6360498a76531cd45696d847b0edac25b2c72..236d0b3f4a7e42fdb5077b6c8d3f3561f66c9dd1 100644 --- a/data/wikisource/wikisource.md +++ b/data/wikisource/wikisource.md @@ -1,106 +1,57 @@ --- pretty_name: Wikisource language: -- da + - da license: cc0-1.0 -license_name: CC-0 +license_name: Creative Commons Zero v1.0 Universal size_categories: -- 1-10k + - 1-10k task_categories: -- text-generation -- fill-mask + - text-generation + - fill-mask task_ids: -- language-modeling -source_datasets: -- danish-foundation-models/danish-gigaword -domains: -- Encyclopedic + - language-modeling --- - # Dataset Card for Wikisource - - -The Danish subsection of [Wikisource](https://en.wikisource.org/wiki/Main_Page). 
-
-
-
 ## Dataset Description
-
-
-
-- **Number of samples**: 3.00K
-- **Number of tokens (Llama 3)**: 6.28M
-- **Average document length in tokens (min, max)**: 2.09K (17, 261.10K)
-
-
-
-
-## Dataset Structure
+- **Number of records:** 2429
+- **Languages:** Danish
+## Dataset Structure
 An example from the dataset looks as follows.
-
-
-
-```py
+```yaml
 {
-    "id": "wikisource_1292",
-    "text": "Dejlig er den himmel blå\nDejlig er den himmel blå, lyst det er at se derpå, hvor de gyldne stjerner [...]",
-    "source": "wikisource",
-    "added": "2025-08-18",
-    "created": "2022-04-18, 2022-04-18",
-    "token_count": 1243
+    'text': '<poem>
+Kæmpehøjen.
+Jeg har stået på mindets ',
+    'source': 'wikisource',
+    'id': 'wikisource_4804',
+    'added': '2021-03-28',
+    'created': '1700-01-01, 2022-01-01',
+    'metadata': {
+        'domain': 'Wiki & Books',
+        'license': 'Creative Commons Legal Code
+
+CC0 1.0 Universal',
+        'source-pretty': 'Wikisource'
+    }
 }
 ```
-### Data Fields
+## Data Fields
-An entry in the dataset consists of the following fields:
+- **id**: source-specific identifier.
+- **text**: textual content of the document.
+- **source**: source of the data.
+- **added**: timestamp AI2 acquired this data.
+- **created**: timestamp when the original document was created (best guess if not available).
+- **metadata**: source-specific metadata.
-- `id` (`str`): A unique identifier for each document.
-- `text` (`str`): The content of the document.
-- `source` (`str`): The source of the document (see [Source Data](#source-data)).
-- `added` (`str`): The date when the document was added to this collection.
-- `created` (`str`): A date range for when the document was originally created.
-- `token_count` (`int`): The number of tokens in the sample, computed using the Llama 3 8B tokenizer.
-
+## License Information
+
+Creative Commons Zero v1.0 Universal +

+Creative Commons Legal Code -### Dataset Statistics - - -

- +CC0 1.0 Universal

-
-
-
-### Processing
-
-For this dataset we have pulled the latest [database dump from wikimedia](https://dumps.wikimedia.org/dawikisource/latest/) and extracted the texts using the [wtf_wikipedia](https://github.com/spencermountain/wtf_wikipedia/tree/dev) parser.
-
-Because the parser is written in JavaScript you need to have Node.js installed on your machine.
-
-To run the `create.py` file you first need to do:
-
-```bash
-$ cd parser/ && npm install && cd ..
-```
-
-We chose `wtf_wikipedia` because, of the parsers we tested, it was empirically the best. We tested `mwparserfromhell`, `mediawiki_dump`, `wikiextractor`, and `wtf_wikipedia`; the others still produced artifacts from the parsing of the wikicode.
-
-## Additional Information
-
-
-### Citation Information
-
-This dataset was initially published as part of the [Danish Gigaword](https://huggingface.co/danish-foundation-models). We recommend that you cite and reference it if you use this dataset:
-
-> Derczynski, L., Ciosici, M. R., et al. (2021). The Danish Gigaword Corpus. In Proceedings of the 23rd Nordic Conference on Computational Linguistics (NoDaLiDa 2021).
-
-```bibtex
-@inproceedings{dagw,
-  title = {{The Danish Gigaword Corpus}},
-  author = {Leon Derczynski and Manuel R. Ciosici and Rebekah Baglini and Morten H. Christiansen and Jacob Aarup Dalsgaard and Riccardo Fusaroli and Peter Juel Henrichsen and Rasmus Hvingelby and Andreas Kirkedal and Alex Speed Kjeldsen and Claus Ladefoged and Finn Årup Nielsen and Jens Madsen and Malte Lau Petersen and Jonathan Hvithamar Rystrøm and Daniel Varab},
-  year = 2021,
-  booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics},
-  publisher = {NEALT}
-}
-```
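For downstream use, the subset can be loaded directly with the `datasets` library. A minimal sketch, assuming `wikisource` is exposed as a named configuration of the main repository (the card does not show the loading call itself):

```py
from datasets import load_dataset

# Assumption: "wikisource" is a named configuration of danish-dynaword.
ds = load_dataset(
    "danish-foundation-models/danish-dynaword",
    "wikisource",
    split="train",
)
print(ds[0]["id"], ds[0]["token_count"])
```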
diff --git a/data/wikisource/wikisource.parquet b/data/wikisource/wikisource.parquet index b4ee0e176c7f5122cd0edd028a8f48dbf67e10a7..ac980783fb7d7b43b9859dae342d67838ad949dc 100644 --- a/data/wikisource/wikisource.parquet +++ b/data/wikisource/wikisource.parquet @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b6a0405062f691a38eb39cd834281511abf1da0199b08ee9806a7b632e6a055c -size 10996692 +oid sha256:a4ee7ec0bb3147f06617c94a8951055a5a806c7917de229d6b2ec2df9c4c0b73 +size 9488335 diff --git a/descriptive_stats.json b/descriptive_stats.json deleted file mode 100644 index 21243b7739f44f90b50388764acb11b10cb5178c..0000000000000000000000000000000000000000 --- a/descriptive_stats.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "number_of_samples": 5612109, - "number_of_tokens": 5894652180, - "min_length_tokens": 2, - "max_length_tokens": 9812841, - "number_of_characters": 17805032682, - "min_length_characters": 1, - "max_length_characters": 37287484 -} \ No newline at end of file diff --git a/docs/icon.png b/docs/icon.png deleted file mode 100644 index 39961561ce83488abdbc641bc66550d40587e293..0000000000000000000000000000000000000000 --- a/docs/icon.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:04a40794e9081b680cb080e084ac3bd0dce0263a3f85d950ba903cb31dbfde9b -size 56349 diff --git a/docs/logo.key b/docs/logo.key deleted file mode 100644 index de3acc58ef0c5a54a3df7bc0fd88367e445b96ff..0000000000000000000000000000000000000000 Binary files a/docs/logo.key and /dev/null differ diff --git a/images/dataset_size_plot.html b/images/dataset_size_plot.html deleted file mode 100644 index 400716d09522f1286a1891c30f518d51e08ec1e7..0000000000000000000000000000000000000000 --- a/images/dataset_size_plot.html +++ /dev/null @@ -1,3885 +0,0 @@ - - - -
-
- - \ No newline at end of file diff --git a/images/dataset_size_plot.svg b/images/dataset_size_plot.svg deleted file mode 100644 index f4e338b5579fe17efaa5e8c6b129cf90dc57c49e..0000000000000000000000000000000000000000 --- a/images/dataset_size_plot.svg +++ /dev/null @@ -1 +0,0 @@ -110010k1MNorwegian Colossal Corpus (books)Norwegian Colossal Corpus (parliament)Norwegian Colossal Corpus (maalfrid)Wikipediaretsinformation.dk (Danish legal information)Hestenettet (Danish debate forum)Spontaneous speechTV 2 RadioHistorical Danish handwriting 1841-1939AI AktindsigtOpenSubtitlesEuropean ParliamentWikipedia CommentsMiljøportalenGutenbergDanish Dependency TreebankJohannes V. JensenWikisourceWikibooksNordjylland NewsNota lyd- og tekstdata (Tekst only)Archive for Danish LiteratureDomsdatabasen.dkEUR-Lex SUMretspraksis (Danish legal information)MeMo Canonical NovelsReligious textsDanske TalerDanNetNorwegian Colossal Corpus (newspaper)Synnejysk ForeningNAATEnevældens Nyheder OnlineBornholmskCellarFolketingetHealth Hovedstadenskat.dkGrundtvig's WorksFinansministeriets UdgivelserMin tokensMax tokensMean tokensToken Length Distribution by DatasetRange (min-max) with mean valuesNumber of Tokens (log scale)Dataset \ No newline at end of file diff --git a/images/dist_document_length.png b/images/dist_document_length.png deleted file mode 100644 index 33a039b25ee76c6a9639a1acd3d3a0bdbec80c06..0000000000000000000000000000000000000000 --- a/images/dist_document_length.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2c826da2a3871ef43faf9a70452a1d596b5d69d45c65b8d2672a3ff3f17c9914 -size 2074840 diff --git a/images/domain_distribution.png b/images/domain_distribution.png deleted file mode 100644 index 061ffcc5adb78cd783cfb1f837f2a681bb859d1f..0000000000000000000000000000000000000000 --- a/images/domain_distribution.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1b2d385ead4f49bb440488b01468e8d0689cba8b9bc05a03a10d271f4d349e3c -size 371710 diff --git a/images/tokens_over_time.html b/images/tokens_over_time.html deleted file mode 100644 index b21388e26245c50059b231051677fce1c8b61c03..0000000000000000000000000000000000000000 --- a/images/tokens_over_time.html +++ /dev/null @@ -1,7 +0,0 @@ - - - -
-
-
-
\ No newline at end of file
diff --git a/images/tokens_over_time.svg b/images/tokens_over_time.svg deleted file mode 100644 index 8949a16e7ac4dfde68c21a2769eb89c7b3354a9e..0000000000000000000000000000000000000000 --- a/images/tokens_over_time.svg +++ /dev/null @@ -1 +0,0 @@
-Jan 2025Mar 2025May 2025Jul 2025Sep 20251.0G2.0G3.0G4.0G5.0G6.0GNumber of Tokens Over Time in Danish DynawordDateNumber of Tokens (Llama 3)Common Corpus (dan) (Langlais et al., 2025)Danish Gigaword (Derczynski et al., 2021)
\ No newline at end of file
diff --git a/makefile b/makefile index 13ea3576d8d4c71e4fc07bc2a613f26b22448a2f..ab188d43d4c7915a00e93b2966a1c0f84a666116 100644 --- a/makefile +++ b/makefile @@ -4,18 +4,9 @@ install:
 
 test:
 	@echo "--- 🧪 Running tests ---"
-	uv run pytest src/tests/ | tee test_results.log
-
-lint:
-	@echo "--- 🧹 Running linters ---"
-	ruff format . # running ruff formatting
-	ruff check . --fix # running ruff linting
+	uv run pytest tests/
 
 bump-version:
 	@echo "--- 🚀 Bumping patch version ---"
-	uv run src/dynaword/bump_version.py
+	uv run scripts/bump_version.py
 
-update-descriptive-statistics:
-	@echo "--- 🚀 Recomputing Descriptive statistics ---"
-	uv run src/dynaword/update_descriptive_statistics.py # compute missing descriptive statistics for all datasets
-	uv run src/dynaword/update_descriptive_statistics.py --dataset default --force # always ensure default dataset is up to date
diff --git a/paper/figure_baseline.png b/paper/figure_baseline.png deleted file mode 100644 index ae64007b4d33472632b7ee07e18d844b5611c105..0000000000000000000000000000000000000000 --- a/paper/figure_baseline.png +++ /dev/null @@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6cf32e792c92d9188a0e5a02ca2ffabe2516e97eb3914f4c5641fe5a47d8a045
-size 112746
diff --git a/paper/paper.md b/paper/paper.md deleted file mode 100644 index fd2942e1796e41a1ec3fbffd89946376eb7361e1..0000000000000000000000000000000000000000 --- a/paper/paper.md +++ /dev/null @@ -1,229 +0,0 @@
-# Dynaword: Moving from One-shot to Continuously Developed Datasets
-
-Authors:
-
-- Kenneth Enevoldsen
-- Kristian Nørgaaard Jensen
-- Jan Kostkan
-
-- Peter Bjørn Jørgensen
-- Per
-- Kristoffer Nielbo
-
-
-# Abstract
-
-Large-scale datasets are foundational for research and development in natural language processing and related fields, and good datasets often require multiple iterations to improve and adjust.
-Despite this, we see many releases of static datasets rather than continually expanding resources, thus preventing community contributions and expansion. Even when a large-scale dataset sees versioned releases, the filtering and quality assurance is often done only by the team releasing the data.
-And while we have seen impressive large-scale releases, these are often derived from Common Crawl or related sources, which are likely to contain copyrighted data that does not support the stated license of the release. This restricts not only the use of the data, but also its derivatives, such as annotated data and language models.
-In an attempt to remedy this shortcoming we developed Danish Dynaword, an illustrative example of how large-scale datasets can be developed. This dynaword contains more than 2x as many tokens as comparable releases, is restricted to strictly permissibly licensed data, and has seen multiple contributions across industry and research.
-This dataset comes equipped with CI to ensure data format, quality, and high documentation standards, and the checks can be run in a developer-friendly
-environment in under 10 minutes.
-Along with this release we have additionally started dynaword projects for Norwegian, Swedish, Faroese, and Icelandic.
-
-
-The dataset is available at: https://huggingface.co/datasets/danish-foundation-models/danish-dynaword
-
-# Introduction
-
-Current datasets
-While creating a current
-
-Current methods for dataset creation tackle only a small [@joshiStateFateLinguistic2020]
-In this project we specifically chose to focus on the low- to mid-resource language Danish (dan). We see two reasons for doing this:
-
-- The dynaword approach is most likely to be beneficial for low- to mid-resource languages (class 2-4; @joshiStateFateLinguistic2020), which have contributors able and willing to contribute, whereas high-resource languages (class 5; @joshiStateFateLinguistic2020) could likely sustain multiple dynaword projects targeting specific domains.
-- not only for Danish b
-
-While it is in theory possible to open a PR on an existing dataset, this practice is rare; instead we often see improvements on an existing dataset published separately (see e.g. [@pascal_alie_kenneth_et_paper], [@that_guy_that_added_langauge_tag_to_a_dataset]). These derivative works rarely get as many downloads as the original.
-
-Contrasting this approach to code development - where it is common practice to create PRs to continually improve the codebase - makes the dataset development landscape seem immature and inefficient.
-
-
-## What is a Dynaword
-
-A dynaword is a continuously developed dataset resource intended for a variety of downstream use cases within natural language processing. Dynaword does not intend to replace existing large-scale releases such as fineweb [@fineweb], OSCAR [@OSCAR], or HPLT [@hplt], but rather to
-complement these in situations where a clearly licensed dataset might be preferred. Such cases include, for example:
-
-- Clearly licensed datasets lend themselves better to derivatives, providing good starting points for permissibly licensed annotated datasets.
-- The EU's AI Act also poses requirements on the training data used for model training.
-- The EU's AI Act makes the distributor of a model responsible for copyright violations, and thus companies might prefer models derived from clearly permissible data.
-
-
-### Continuous Development of Large-Scale Datasets
-
-Cont
-
-### Design Considerations
-
-## Related work
-
-
-### Existing approaches in Dataset development
-
-Large projects like OSCAR [@OSCAR], HPLT [@hplt], and fineweb [@fineweb] release iterative versions of datasets derived from Common Crawl [@commoncrawl].
-These approaches make it hard for contributors to join and contribute, and they silo dataset development in a few institutions. Furthermore, the focus on
-Common Crawl ignores other valuable resources, such as public APIs, and comes with a slew of ethical and legal concerns [@missing] which affect not only the usefulness of the datasets but also the models derived from them.
-While such resources, e.g. individual datasets derived from APIs, would be expensive for individual groups to collect, as they rarely offer enough data to be worth the time, opening up this approach to a community makes them far more viable.
-
-
-Opening up the development pipeline also increases openness around the dataset collection. ADD SOMETHING on inclusion here.
-
-Read up on fineweb!!!
(I assume they do some CI)
-
-Other successful open-source projects: the dependency treebank project [@dep_treebank], ...
-
-Existing projects on openly licensed data [@elutherAI]
-
-We note that our approach is complementary to existing projects such as fineweb.
-
-
-### Danish and Scandinavian Datasets
-
-Lacunae of Danish [@cite]
-Danish Gigaword [@dagw]
-Swedish gigaword? [@swedish]
-NCC [@ncc_kummervold]
-
-
-Existing benchmarks covering Scandinavian languages, such as ScandEval [@scandeval; @scandeval2] and SEB [@seb], argue that it is reasonable to evaluate on the
-
-# Methods
-
-## Continuous Integration
-
-Our approach for continuous integration: how to submit, and what we test for.
-
-
-# Results
-
-## Dataset collection
-
-Current collection.
-
-| Source          | Date       | Domain         | License | Size |
-| --------------- | ---------- | -------------- | ------- | ---- |
-| **Legal**       |            |                |         |      |
-| Retsinformation | date range | Legal, Written |         | 188M |
-| ...             |            |                |         |      |
-| **Total**       |            |                |         |      |
-
-
-For a description of each dataset we refer to the public repository.
-
-
-# Conclusion
-
-## Dataset delivery
-
-# Limitations
-
-- Is Danish too limited: should we consider multilingual sources (Scandinavian, Germanic, English)?
-
-- Size:
-  - The size is currently limited; if it grows too large, development becomes problematic.
-  - This is still far smaller than what could be extracted from Common Crawl.
-
-- Only Danish: While developing CI for datasets is by no means new [@missing], doing so for open pre-training datasets in a collaborative fashion has
-not been tested on a larger scale. Once the approach has been validated, we plan to host a collaboration along with Hugging Face to develop these dataset sources.
-
-- Hugging Face datasets as a development platform for datasets: Throughout this work it was clear to many of the developers that minor changes (e.g. filtering out a few bad examples) were both hard to create PRs for and hard to review, often requiring the reviewer to simply trust that the user did what was stated in the commit message. While previous projects have tackled this issue using human-readable formats [@dep_treebank], due to the scope of the dataset this would quickly become inefficient.
-This lack of clarity increases the likelihood of dataset attacks such as dataset poisoning [@missing]. We expect to see both interface and software development to detect and prevent such attacks.
-
-- Machine generated content within training data: Not
-
-- Often we are interested in high-quality data when training an LLM. However, the presented dynaword performs only a minimal level of cleaning. This is a deliberate decision, as different modelling choices might warrant different cleaning approaches, but it could leave a substantial level of post-processing to the user of the dataset.
-
-Ethical and Environmental considerations
-
-Environmental:
-- A common codebase leads to less duplication of datasets and reduces the storage required.
-- Continual CI running on large datasets could be a concern. Currently our tests use a total of XXX CO2-eq (estimated using codecarbon). However, we have already seen people use the training [@fineweb] and evaluation of LLMs to approximate dataset quality; such workflows could quickly increase the CO2 consumption.
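The "XXX CO2-eq (estimated using codecarbon)" figure above suggests the test run is wrapped in an emissions tracker. A minimal sketch of how such an estimate could be obtained, assuming codecarbon's `EmissionsTracker` API; the exact CI wiring is not shown in the paper:

```py
import subprocess

from codecarbon import EmissionsTracker

# Assumption: the CO2-eq estimate comes from wrapping the test suite; the paper does not show this code.
tracker = EmissionsTracker(project_name="danish-dynaword-ci")
tracker.start()
subprocess.run(["uv", "run", "pytest", "src/tests/"], check=False)
emissions_kg = tracker.stop()  # estimated kg CO2-eq for the tracked block
print(f"Test suite emissions: {emissions_kg:.6f} kg CO2-eq")
```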
-
-
-
-
-## Additional content
-
-Comparison table
-
-|                        | Size | Sufficient Documentation | Data availability | Legal Status    | Quality        |
-| ---------------------- | ---- | ------------------------ | ----------------- | --------------- | -------------- |
-| Danish Dynaword (Ours) | 3.5B | Replicable^              | Open Access       | Openly Licensed | Mixed (high)   |
-| Danish Gigaword*       |      | Documentary              | Open Access       | Openly Licensed | Mixed (high)   |
-| Common Corpus (dan)    |      | Replicable               | Open Access       | Openly Licensed | OCR (low)      |
-| Fineweb (dan)          |      | Replicable               | Open Access       |                 | Mixed (medium) |
-
-
-*The Danish Gigaword subsection included in Danish Dynaword, i.e. the subsection that is permissibly licensed.
-^Some datasets are derived from Danish Gigaword; some of these subsections are not (currently) replicable.
-
-This follows the scheme from figure 1 (https://arxiv.org/abs/2501.08365)
-
-Add a comparison of the number of tokens:
-Common Corpus (DA) -
-Gigaword (DA) - Open
-M-Fineweb (DA)
-
-
-->
\ No newline at end of file
diff --git a/paper/references.bib b/paper/references.bib deleted file mode 100644 index 2c1796659052fc7bfc099780f1c5e4f699badea8..0000000000000000000000000000000000000000 --- a/paper/references.bib +++ /dev/null @@ -1,25 +0,0 @@
-
-@article{joshiStateFateLinguistic2021,
-  title = {The {State} and {Fate} of {Linguistic} {Diversity} and {Inclusion} in the {NLP} {World}},
-  url = {http://arxiv.org/abs/2004.09095},
-  abstract = {Language technologies contribute to promoting multilingualism and linguistic diversity around the world. However, only a very small number of the over 7000 languages of the world are represented in the rapidly evolving language technologies and applications. In this paper we look at the relation between the types of languages, resources, and their representation in NLP conferences to understand the trajectory that different languages have followed over time. Our quantitative investigation underlines the disparity between languages, especially in terms of their resources, and calls into question the "language agnostic" status of current models and systems. Through this paper, we attempt to convince the ACL community to prioritise the resolution of the predicaments highlighted here, so that no language is left behind.},
-  urldate = {2021-03-20},
-  journal = {arXiv:2004.09095 [cs]},
-  author = {Joshi, Pratik and Santy, Sebastin and Budhiraja, Amar and Bali, Kalika and Choudhury, Monojit},
-  month = jan,
-  year = {2021},
-  note = {arXiv: 2004.09095},
-  keywords = {Computer Science - Computation and Language},
-}
-
-@inproceedings{dagw,
-  title = {The {{Danish Gigaword}} Corpus},
-  booktitle = {Proceedings of the 23rd Nordic Conference on Computational Linguistics ({{NoDaLiDa}})},
-  author = {{Str{\o}mberg-Derczynski}, Leon and Ciosici, Manuel and Baglini, Rebekah and Christiansen, Morten H. and Dalsgaard, Jacob Aarup and Fusaroli, Riccardo and Henrichsen, Peter Juel and Hvingelby, Rasmus and Kirkedal, Andreas and Kjeldsen, Alex Speed and Ladefoged, Claus and Nielsen, Finn Aarup and Madsen, Jens and Petersen, Malte Lau and Rystr{\o}m, Jonathan Hvithamar and Varab, Daniel},
-  year = {05 31--2 06 2021},
-  pages = {413--421},
-  publisher = {Link{\"o}ping University Electronic Press, Sweden},
-  address = {Reykjavik, Iceland (Online)},
-  abstract = {Danish language technology has been hindered by a lack of broad-coverage corpora at the scale modern NLP prefers.
This paper describes the Danish Gigaword Corpus, the result of a focused effort to provide a diverse and freely-available one billion word corpus of Danish text. The Danish Gigaword corpus covers a wide array of time periods, domains, speakers' socio-economic status, and Danish dialects.}, - file = {/Users/au561649/Zotero/storage/9B3GVP6D/Derczynski et al. - The Danish Gigaword Corpus.pdf} -} diff --git a/pyproject.toml b/pyproject.toml index 40cd4eec964f24c198657544cb7c0e398adb95d2..27fe2da8df7e2dd78f12c91859c56582c494c9fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,34 +1,16 @@ [project] -name = "dynaword" -version = "1.2.12" -description = "project code for the danish dynaword project" +name = "danish-gigaword-2" +version = "1.0.1" +description = "project code for the danish gigaword 2 project" readme = "README.md" -requires-python = ">=3.12,<3.13" # 3.13 have issues with spacy and pytorch +requires-python = ">=3.13" dependencies = [ - # for commands - "datasets>=3.0.0", # loading and validating datasets - "pydantic>=2.10.4", # validating schemas - "tabulate>=0.9.0", # creating md table - "tomlkit>=0.13.2", # reading toml - "transformers>=4.47.1", # tokenization - # figures - "plotnine>=0.14.5", - "plotly>=6.0.1", - "nbformat>=4.2.0", - "kaleido==0.2.1", -] - -[dependency-groups] -dev = [ - # development + "datasets>=3.0.0", "ipykernel>=6.29.5", - "pip>=25.0.1", - # test + "matplotlib>=3.10.0", + "numpy>=2.2.0", + "plotnine>=0.14.3", "pytest>=8.3.4", - # formatting - "ruff>=0.8.3", + "seaborn>=0.13.2", + "toml>=0.10.2", ] - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" diff --git a/scripts/bump_version.py b/scripts/bump_version.py new file mode 100644 index 0000000000000000000000000000000000000000..45d9d80da61916b3a893427e2b8982c0a2fc422c --- /dev/null +++ b/scripts/bump_version.py @@ -0,0 +1,16 @@ +from packaging.version import Version +from pathlib import Path + +import toml + +c_file = Path(__file__) +pyproject = c_file.parent.parent / "pyproject.toml" + + +with pyproject.open("r") as f: + data = toml.load(f) + version = Version(data["project"]["version"]) + data["project"]["version"] = str(Version(f"{version.major}.{version.minor}.{version.micro + 1}")) + +with pyproject.open("w") as f: + toml.dump(data, f) \ No newline at end of file diff --git a/scripts/load_dataset.py b/scripts/load_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..976ee5236560bcfbe57ed81929b529c3233cf227 --- /dev/null +++ b/scripts/load_dataset.py @@ -0,0 +1,6 @@ +from datasets import load_dataset + +name = "../." 
# "danish-foundation-models/danish-gigaword" +ds = load_dataset("../.", split = "train") + +ds \ No newline at end of file diff --git a/src/dynaword/bump_version.py b/src/dynaword/bump_version.py deleted file mode 100644 index 74be25b6d17c43eba61203d2569460c71dc736d4..0000000000000000000000000000000000000000 --- a/src/dynaword/bump_version.py +++ /dev/null @@ -1,56 +0,0 @@ -from pathlib import Path - -import tomlkit -from packaging.version import Version - -from dynaword.paths import pyproject_path, readme_path - - -def get_version(pyproject_path: Path = pyproject_path) -> str: - with pyproject_path.open("r") as f: - data = tomlkit.load(f) - return data["project"]["version"] # type: ignore - - -def update_pyproject_version(version: str, pyproject_path: Path) -> None: - with pyproject_path.open("r") as f: - data = tomlkit.load(f) - data["project"]["version"] = version # type: ignore - - with pyproject_path.open("w") as f: - tomlkit.dump(data, f) - - -def update_readme(version: str, readme_path: Path) -> None: - """Find version in README table and update it.""" - start = "" - end = "" - - with readme_path.open("r") as f: - lines = f.readlines() - - in_table = False - for i, line in enumerate(lines): - if start in line: - in_table = True - if in_table: - if "**Version**" in line: - lines[i] = f"| **Version** | {version} ([Changelog](/CHANGELOG.md)) |\n" - break - if end in line: - raise ValueError("**Version** not found in README table.") - - with readme_path.open("w") as f: - f.writelines(lines) - - -def main(pyproject_path: Path, readme_path: Path) -> None: - version = get_version(pyproject_path) - version = Version(version) - version = Version(f"{version.major}.{version.minor}.{version.micro + 1}") - update_pyproject_version(str(version), pyproject_path) - update_readme(str(version), readme_path) - - -if __name__ == "__main__": - main(pyproject_path, readme_path) diff --git a/src/dynaword/dataset_structure.py b/src/dynaword/dataset_structure.py deleted file mode 100644 index dc55a881b7b1fbefeee3d02e3c7169179a1acf78..0000000000000000000000000000000000000000 --- a/src/dynaword/dataset_structure.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging -from datetime import date -from enum import Enum - -from pydantic import BaseModel, BeforeValidator -from typing_extensions import Annotated - -logger = logging.getLogger(__name__) - - -def ensure_tuple(created: str | tuple) -> tuple: - if isinstance(created, str): - return tuple(created.split(", ")) - return created - - -class SampleSchema(BaseModel): - id: str - text: str - source: str - added: date - created: Annotated[tuple[date, date], BeforeValidator(ensure_tuple)] - token_count: int - - -class ColumnNames(Enum): - id = "id" - text = "text" - source = "source" - added = "added" - created = "created" - token_count = "token_count" - - -COLUMN_ORDER = [col.value for col in ColumnNames] diff --git a/src/dynaword/datasheet.py b/src/dynaword/datasheet.py deleted file mode 100644 index eb4d3c14b7fa5a9f403c8690c3c930bd8518ccd1..0000000000000000000000000000000000000000 --- a/src/dynaword/datasheet.py +++ /dev/null @@ -1,307 +0,0 @@ -import json -import logging -from datetime import datetime -from enum import Enum -from pathlib import Path -from textwrap import dedent -from typing import Any, Literal, Self, cast - -import yaml -from datasets import Dataset, IterableDataset, load_dataset -from pydantic import BaseModel, field_validator - -from dynaword.descriptive_stats import DescriptiveStatsOverview -from dynaword.plots.descriptive_statistics_plots 
import ( - create_descriptive_statistics_plots, -) -from dynaword.typings import DOMAIN, LICENSE, LICENSE_NAMES_MAPPING - -logger = logging.getLogger(__name__) - - -LICENSE_HEADER = "## License Information" - - -class DEFAULT_SECTION_TAGS(Enum): - desc_stats = "DESC-STATS" - sample = "SAMPLE" - dataset_plots = "DATASET PLOTS" - short_description = "SHORT DESCRIPTION" - - -DATASET_PLOTS_template = """ -
<p align="center"> -<img src="./images/dist_document_length.png" width="600" style="margin-right: 10px;" /> -</p>
-""" - - -SAMPLE_template = """ -```py -{sample} -``` - -### Data Fields - -An entry in the dataset consists of the following fields: - -- `id` (`str`): An unique identifier for each document. -- `text`(`str`): The content of the document. -- `source` (`str`): The source of the document (see [Source Data](#source-data)). -- `added` (`str`): An date for when the document was added to this collection. -- `created` (`str`): An date range for when the document was originally created. -- `token_count` (`int`): The number of tokens in the sample computed using the Llama 8B tokenizer -""" - - -def convert_to_human_readable(value: float) -> str: - thresholds = [ - (1_000_000_000, "B"), - (1_000_000, "M"), - (1_000, "K"), - ] - for threshold, label in thresholds: - if value > threshold: - return f"{value / threshold:.2f}{label}" - - return str(value) - - -def create_sample_str(sample: dict[str, Any], max_str_len: int = 100): - for k in sample: - if isinstance(sample[k], str) and len(sample[k]) > max_str_len: - sample[k] = sample[k][:max_str_len] + "[...]" - if isinstance(sample[k], datetime): - sample[k] = str(sample[k]) - - json_sample = json.dumps(sample, indent=2, ensure_ascii=False) - sample_str = SAMPLE_template.format(sample=json_sample) - - return sample_str - - -class DataSheet(BaseModel): - pretty_name: str - license: LICENSE - license_name: str | None - language: list[Literal["da"]] - domains: list[DOMAIN] | None # None for main readme # TODO: make literal - path: Path - frontmatter: dict[str, Any] - body: str - - # check that licence name is compatible with license - @field_validator("license_name") # type: ignore - def check_license_name(cls, v: str | None, values: dict[str, Any]) -> str | None: - if v is not None and v in LICENSE_NAMES_MAPPING: - if values["license"] != LICENSE_NAMES_MAPPING[v]: - raise ValueError( - f"License name '{v}' does not match license '{values['license']}'" - ) - return v - - @property - def short_description(self) -> str: - short_description = self.get_tag_content(DEFAULT_SECTION_TAGS.short_description) - if short_description.endswith("."): - short_description = short_description[:-1] - return short_description - - @property - def license_information(self) -> str: - return self.get_section_by_header(LICENSE_HEADER) - - @property - def frontmatter_as_str(self) -> str: - return yaml.dump(self.frontmatter, indent=2, sort_keys=False) - - def to_str(self) -> str: - return f"---\n{self.frontmatter_as_str.strip()}\n---\n\n{self.body.strip()}\n" - - def get_dataset(self, **kwargs) -> Dataset: - ds_path = self.path.parent - # required to avoid loading .png files for the images/ folder (e.g. 
- def get_dataset(self, **kwargs) -> Dataset: - ds_path = self.path.parent - # required to avoid loading .png files from the images/ folder (e.g. for plots) instead of parquet files - ignore_dirs = {".venv", "tmp"} # add more if needed - - parquet_files = [ - p.as_posix() - for p in ds_path.glob("**/*.parquet") - if not any(ignored in p.parts for ignored in ignore_dirs) - ] - ds = load_dataset( - ds_path.as_posix(), split="train", data_files=parquet_files, **kwargs - ) - ds = cast(Dataset, ds) - return ds - - def get_descritive_stats(self) -> DescriptiveStatsOverview: - path = self.path.parent / "descriptive_stats.json" - return DescriptiveStatsOverview.from_disk(path) - - def get_section_indices_by_header(self, header: str) -> tuple[int, int]: - level = header.split(" ")[0].count("#") - - next_is_end_section = False - end_header = None - for _header in self.get_headers(levels=list(range(1, level + 1))): - if header.strip() == _header.strip(): - next_is_end_section = True - continue - - if next_is_end_section: - end_header = _header - break - - if not next_is_end_section: - raise ValueError(f"The header '{header}' is not found in the text.") - - start_idx = self.body.find(header) - if end_header: - end_idx = self.body[start_idx:].find(end_header) + start_idx - else: - end_idx = len(self.body) - - return start_idx, end_idx - - def get_section_by_header(self, header: str) -> str: - s, e = self.get_section_indices_by_header(header) - return self.body[s:e] - - def get_headers(self, levels: list[int] = [1, 2, 3, 4]) -> list[str]: - def __contains_level(text: str) -> bool: - if text.startswith("#"): - for level in levels: - if text.startswith("#" * level): - return True - return False - - return [line for line in self.body.splitlines() if __contains_level(line)] - - def get_tag_idx(self, tag: str | DEFAULT_SECTION_TAGS) -> tuple[int, int]: - if isinstance(tag, Enum): - tag = tag.value - tag_start = f"<!-- START-{tag} -->" - tag_end = f"<!-- END-{tag} -->" - start_idx = self.body.find(tag_start) - end_idx = self.body.find(tag_end) - if end_idx != -1 and start_idx != -1 and start_idx < end_idx: - return start_idx, end_idx - raise ValueError(f"tag ({tag}) not found in readme") - - def get_tag_content(self, tag: str | DEFAULT_SECTION_TAGS) -> str: - if isinstance(tag, Enum): - tag = tag.value - s, e = self.get_tag_idx(tag=tag) - tag_start = f"<!-- START-{tag} -->" - return self.body[s + len(tag_start) : e].strip() - - def add_descriptive_stats( - self, descriptive_stats: DescriptiveStatsOverview | None = None - ) -> str: - if descriptive_stats is None: - d_stats = DescriptiveStatsOverview.from_dataset(self.get_dataset()) - else: - d_stats = descriptive_stats - - package = ( - dedent(f""" - - **Number of samples**: {convert_to_human_readable(d_stats.number_of_samples)} - - **Number of tokens (Llama 3)**: {convert_to_human_readable(d_stats.number_of_tokens)} - - **Average document length in tokens (min, max)**: {convert_to_human_readable(d_stats.average_document_length_tokens)} ({convert_to_human_readable(d_stats.min_length_tokens)}, {convert_to_human_readable(d_stats.max_length_tokens)}) - """).strip() - + "\n" - ) - - return self.replace_tag( - package=package, - tag=DEFAULT_SECTION_TAGS.desc_stats, - ) - - def add_dataset_plots(self, dataset: Dataset, create_plot: bool = True) -> str: - if create_plot: - create_descriptive_statistics_plots( - dataset=dataset, save_dir=self.path.parent - ) - return self.replace_tag( - package=DATASET_PLOTS_template, tag=DEFAULT_SECTION_TAGS.dataset_plots - )
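(Aside: a minimal sketch, not part of the diff, of the marker convention the tag helpers in `DataSheet` rely on; the exact marker strings are an assumption, reconstructed from the tag names in `DEFAULT_SECTION_TAGS`.)

```py
# Hypothetical datasheet body: each generated section sits between a start and an end marker.
body = "Intro.\n<!-- START-SAMPLE -->\nold sample\n<!-- END-SAMPLE -->\nOutro."
# get_tag_content("SAMPLE") would return "old sample", and replace_tag swaps it for
# new content while leaving everything outside the two markers untouched.
```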
- - def add_sample_and_description( - self, dataset: Dataset | IterableDataset | None = None - ) -> str: - if dataset is None: - dataset = self.get_dataset(streaming=True) - - sample = dataset[0] if isinstance(dataset, Dataset) else next(iter(dataset)) - return self.replace_tag( - package=create_sample_str(sample), tag=DEFAULT_SECTION_TAGS.sample - ) - - def replace_tag(self, package: str, tag: str | DEFAULT_SECTION_TAGS) -> str: - """Replace a tag in the datasheet body. - - Args: - package: The content you want to replace it with - tag: The tag you want to replace - - Returns: - The entire body text - """ - if isinstance(tag, Enum): - tag = tag.value - tag_start = f"<!-- START-{tag} -->" - tag_end = f"<!-- END-{tag} -->" - - if self.body.count(tag_start) != 1 or self.body.count(tag_end) != 1: - raise ValueError( - f"The markers ({tag_start} ... {tag_end}) do not appear in the markdown. Markers should appear exactly once in the markdown." - ) - - start_md, _, remainder = self.body.partition(tag_start) - _, _, end_md = remainder.partition(tag_end) - - return f"{start_md}{tag_start}\n{package.strip()}\n{tag_end}{end_md}" - - @staticmethod - def get_frontmatter_and_body(file_path: Path) -> tuple[dict[str, Any], str]: - with file_path.open("r") as f: - content = f.read() - if content.startswith("---"): - end_idx = content.find("---", 3) - start_idx_body = end_idx + 3 - if end_idx != -1: - frontmatter = content[3:end_idx].strip() - return yaml.safe_load(frontmatter), content[start_idx_body:] - raise ValueError(f"No frontmatter found in file: {file_path}") - - @classmethod - def load_from_path(cls, readme_path: Path) -> Self: - frontmatter, body = cls.get_frontmatter_and_body(readme_path) - return cls( - frontmatter=frontmatter, - body=body, - license=frontmatter["license"], - language=frontmatter["language"], - pretty_name=frontmatter["pretty_name"], - domains=frontmatter["domains"] if "domains" in frontmatter else None, - license_name=frontmatter["license_name"] - if "license_name" in frontmatter - else None, - path=readme_path, - ) - - def write_to_path(self, readme_path: Path | None = None) -> None: - if readme_path is None: - readme_path = self.path - with readme_path.open("w") as f: - f.write(self.to_str()) - - -if __name__ == "__main__": - from dynaword.paths import repo_path - - sheet = DataSheet.load_from_path(repo_path / "data" / "dannet" / "dannet.md") - ds = sheet.get_dataset() - - sheet.body = sheet.add_descriptive_stats(descriptive_stats=None) - sheet.write_to_path() diff --git a/src/dynaword/descriptive_stats.py b/src/dynaword/descriptive_stats.py deleted file mode 100644 index b56ed9ad903fb9f18dd8cc22ab3fa53a66135923..0000000000000000000000000000000000000000 --- a/src/dynaword/descriptive_stats.py +++ /dev/null @@ -1,95 +0,0 @@ -from __future__ import annotations - -import json -import logging -from dataclasses import dataclass -from pathlib import Path - -from datasets import Dataset - -logger = logging.getLogger(__name__) - - -def calculate_average_document_length( - dataset: Dataset, text_column: str = "text" -) -> float: - texts = sum(len(t) for t in dataset[text_column]) - return texts / len(dataset) - - -@dataclass() -class DescriptiveStatsOverview: - """ - Overview of descriptive statistics for a dataset. - - Attributes: - number_of_samples: Total number of samples in the dataset. - number_of_tokens: Total number of tokens in the dataset. - min_length_tokens, max_length_tokens: Minimum and maximum document length in tokens. - number_of_characters: Total number of characters in the dataset. - min_length_characters, max_length_characters: Minimum and maximum document length in characters.
- """ - - number_of_samples: int - number_of_tokens: int - min_length_tokens: int - max_length_tokens: int - number_of_characters: int - min_length_characters: int - max_length_characters: int - - @property - def average_document_length_tokens(self) -> float: - return ( - round(self.number_of_tokens / self.number_of_samples, 2) - if self.number_of_samples > 0 - else 0.0 - ) - - @property - def average_document_length_characters(self) -> float: - return ( - round(self.number_of_characters / self.number_of_samples, 2) - if self.number_of_samples > 0 - else 0.0 - ) - - @classmethod - def from_disk(cls, path: Path) -> DescriptiveStatsOverview: - with path.open("r") as f: - data = json.load(f) - obj = cls(**data) - return obj - - def to_disk(self, path: Path) -> None: - with path.with_suffix(".json").open("w") as f: - json.dump(self.__dict__, f, indent=2) - - @classmethod - def from_dataset(cls, dataset: Dataset) -> DescriptiveStatsOverview: - return cls( - number_of_samples=len(dataset), - number_of_tokens=sum(dataset["token_count"]), - min_length_tokens=min(dataset["token_count"]), - max_length_tokens=max(dataset["token_count"]), - number_of_characters=sum(len(t) for t in dataset["text"]), - min_length_characters=min(len(t) for t in dataset["text"]), - max_length_characters=max(len(t) for t in dataset["text"]), - ) - - def __add__(self, other: DescriptiveStatsOverview) -> DescriptiveStatsOverview: - if not isinstance(other, DescriptiveStatsOverview): - raise TypeError("Can only add DescriptiveStatsOverview objects") - return DescriptiveStatsOverview( - number_of_samples=self.number_of_samples + other.number_of_samples, - number_of_tokens=self.number_of_tokens + other.number_of_tokens, - min_length_tokens=min(self.min_length_tokens, other.min_length_tokens), - max_length_tokens=max(self.max_length_tokens, other.max_length_tokens), - number_of_characters=self.number_of_characters + other.number_of_characters, - min_length_characters=min( - self.min_length_characters, other.min_length_characters - ), - max_length_characters=max( - self.max_length_characters, other.max_length_characters - ), - ) diff --git a/src/dynaword/paths.py b/src/dynaword/paths.py deleted file mode 100644 index b1a3775f61ed5847c9fd7cfd7a787d4eba136678..0000000000000000000000000000000000000000 --- a/src/dynaword/paths.py +++ /dev/null @@ -1,5 +0,0 @@ -from pathlib import Path - -repo_path = Path(__file__).parent.parent.parent -pyproject_path = repo_path / "pyproject.toml" -readme_path = repo_path / "README.md" diff --git a/src/dynaword/plots/descriptive_statistics_plots.py b/src/dynaword/plots/descriptive_statistics_plots.py deleted file mode 100644 index ca92d66774558889549ec9638191696254bbee12..0000000000000000000000000000000000000000 --- a/src/dynaword/plots/descriptive_statistics_plots.py +++ /dev/null @@ -1,44 +0,0 @@ -import logging -from pathlib import Path - -import pandas as pd -import plotnine as pn -from datasets import Dataset - -logger = logging.getLogger(__name__) - - -def create_descriptive_statistics_plots( - dataset: Dataset, - save_dir: Path, -) -> tuple[Path, pn.ggplot]: - logger.info("creating descriptive statistics plot to readme.") - lengths = dataset["token_count"] - df = pd.DataFrame({"lengths": lengths, "Source": dataset["source"]}) - - plot = ( - pn.ggplot(df, pn.aes(x="lengths", y=pn.after_stat("count"))) - + pn.geom_histogram(bins=100) - + pn.labs( - x="Document Length (Tokens)", - y="Count", - title="Distribution of Document Lengths", - ) - + pn.theme_minimal() - + pn.facet_wrap("Source", 
scales="free", ncol=3) - ) - - img_path = save_dir / "images" - img_path.mkdir(parents=False, exist_ok=True) - save_path = img_path / "dist_document_length.png" - pn.ggsave( - plot, - save_path, - dpi=500, - width=10, - height=10, - units="in", - verbose=False, - ) - - return save_path, plot diff --git a/src/dynaword/plots/plot_tokens_over_time.py b/src/dynaword/plots/plot_tokens_over_time.py deleted file mode 100644 index a62f7d28b5ae873f997e7eefd689731720ffa430..0000000000000000000000000000000000000000 --- a/src/dynaword/plots/plot_tokens_over_time.py +++ /dev/null @@ -1,241 +0,0 @@ -import json -import logging -import subprocess -from datetime import datetime -from typing import Any, Dict, List, Optional, Tuple - -import pandas as pd -import plotly.graph_objects as go - -from dynaword.paths import repo_path - -# Configure logging -logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" -) -logger = logging.getLogger(__name__) - - -def get_file_history( - filename: str = "descriptive_stats.json", -) -> List[Tuple[str, str, str]]: - """Get commit history for a file with commit messages""" - logger.info(f"Retrieving git history for {filename}") - - cmd = [ - "git", - "log", - "--format=%H|%ci|%s", # commit hash | commit date | subject - "--", - filename, - ] - - try: - result = subprocess.run( - cmd, capture_output=True, text=True, cwd=repo_path, check=True - ) - commits = [] - - for line in result.stdout.strip().split("\n"): - if line: - parts = line.split("|", 2) # Split on first 2 pipes only - if len(parts) == 3: - commit_hash, date_str, message = parts - commits.append((commit_hash, date_str, message)) - - logger.info(f"Found {len(commits)} commits for {filename}") - return commits - - except subprocess.CalledProcessError as e: - logger.error(f"Failed to get git history: {e}") - return [] - - -def get_file_at_commit(commit_hash: str, filename: str) -> Optional[Dict[str, Any]]: - """Get file content at specific commit""" - cmd = ["git", "show", f"{commit_hash}:{filename}"] - - try: - result = subprocess.run( - cmd, capture_output=True, text=True, cwd=repo_path, check=True - ) - return json.loads(result.stdout) - except (subprocess.CalledProcessError, json.JSONDecodeError) as e: - logger.warning(f"Failed to parse {filename} at commit {commit_hash[:8]}: {e}") - return None - - -def create_token_dataframe(filename: str = "descriptive_stats.json") -> pd.DataFrame: - """Create DataFrame with token history from git commits""" - logger.info("Building token history dataframe from git commits") - - commits = get_file_history(filename) - if not commits: - logger.warning("No commits found") - return pd.DataFrame() - - data = [] - for commit_hash, date_str, commit_message in commits: - file_data = get_file_at_commit(commit_hash, filename) - if file_data and "number_of_tokens" in file_data: - try: - date = datetime.fromisoformat(date_str.split(" ")[0]) - data.append( - { - "date": date, - "tokens": file_data["number_of_tokens"], - "samples": file_data.get("number_of_samples", 0), - "avg_length": file_data.get("average_document_length", 0), - "commit": commit_hash, - "commit_short": commit_hash[:8], - "commit_message": commit_message, - } - ) - except ValueError as e: - logger.warning(f"Failed to parse date {date_str}: {e}") - - # Convert to DataFrame and sort by date - df = pd.DataFrame(data) - if df.empty: - logger.warning("No valid data found in commits") - return df - - df = df.sort_values("date").reset_index(drop=True) - - # Calculate token changes - if 
len(df) > 1: - df["token_change"] = df["tokens"].diff() - - logger.info( - f"Created dataframe with {len(df)} data points spanning {df['date'].min().date()} to {df['date'].max().date()}" - ) - return df - - -def _format_tokens(value: float) -> str: - """Format tokens with human-readable suffixes""" - if value >= 1e12: - return f"{value / 1e12:.2f}T" - elif value >= 1e9: - return f"{value / 1e9:.2f}G" - elif value >= 1e6: - return f"{value / 1e6:.2f}M" - elif value >= 1e3: - return f"{value / 1e3:.2f}k" - else: - return f"{value:.0f}" - - -def _create_hover_text(df: pd.DataFrame) -> List[str]: - """Create hover text for each data point""" - hover_text = [] - for _, row in df.iterrows(): - hover_info = ( - f"Date: {row['date'].strftime('%Y-%m-%d')}<br>" - f"Tokens: {_format_tokens(row['tokens'])}<br>" - ) - - if pd.notna(row.get("token_change")): - change_sign = "+" if row["token_change"] >= 0 else "" - hover_info += ( - f"Change: {change_sign}{_format_tokens(abs(row['token_change']))}<br>" - ) - - hover_info += ( - f"Samples: {row['samples']:,}<br>" - f"Commit: {row['commit_short']}<br>" - f"Message: {row['commit_message']}" - ) - hover_text.append(hover_info) - - return hover_text - - -def _add_reference_lines(fig: go.Figure) -> None: - """Add reference lines for other Danish corpora""" - references = [ - (300_000_000, "Common Corpus (dan) (Langlais et al., 2025)"), - (1_000_000_000, "Danish Gigaword (Derczynski et al., 2021)"), - ] - - for y_value, annotation in references: - fig.add_hline( - y=y_value, - line_dash="dash", - line_color="gray", - line_width=1, - annotation_text=annotation, - annotation_position="top left", - annotation_font_size=12, - annotation_font_color="gray", - ) - - -def plot_tokens_over_time( - df: pd.DataFrame, width: int = 600, height: int = 400 -) -> go.Figure: - """Plot tokens over time using Plotly with interactive hover info""" - hover_text = _create_hover_text(df) - - # Create the plot - fig = go.Figure() - - # Add main data line - fig.add_trace( - go.Scatter( - x=df["date"], - y=df["tokens"], - mode="lines+markers", - name="Tokens", - line=dict(width=3, color="#DC2626"), # Saturated red - marker=dict(size=5, color="#DC2626"), - hovertemplate="%{text}", - text=hover_text, - ) - ) - - # Add reference lines - _add_reference_lines(fig) - - # Update layout - fig.update_layout( - title="Number of Tokens Over Time in Danish Dynaword", - xaxis_title="Date", - yaxis_title="Number of Tokens (Llama 3)", - hovermode="closest", - width=width, - height=height, - showlegend=False, - plot_bgcolor="rgba(0,0,0,0)", # Transparent plot background - paper_bgcolor="rgba(0,0,0,0)", # Transparent paper background - ) - - # Set x-axis and y-axis properties - # x_min = df["date"].min() - pd.Timedelta(days=) - # x_max = df["date"].max() + pd.Timedelta(days=1) - - # Format y-axis - fig.update_yaxes(tickformat=".2s", ticksuffix="") - # fig.update_xaxes(range=[x_min, x_max]) # Explicitly set x-axis range - return fig - - -def create_tokens_over_time_plot() -> None: - """Main function to create DataFrame and plot tokens over time""" - df = create_token_dataframe() - if df.empty: - logger.warning("No data available to plot") - return - logger.info("Generating interactive plot") - fig = plot_tokens_over_time(df) - - save_path = repo_path / "images" / "tokens_over_time.html" - save_path_svg = repo_path / "images" / "tokens_over_time.svg" - - save_path.parent.mkdir(parents=True, exist_ok=True) - fig.write_html(save_path, include_plotlyjs="cdn") - fig.write_image(save_path_svg) - - -if __name__ == "__main__": - create_tokens_over_time_plot()
- """ - p = (repo_path / "data").glob("**/*descriptive_stats.json") - - data = [] - for path in p: - with path.open("r") as f: - package = json.load(f) - sheet = DataSheet.load_from_path(path.parent / f"{path.parent.name}.md") - package["dataset_name"] = path.parent.name - package["pretty_name"] = sheet.pretty_name - data.append(package) - - df = pd.DataFrame(data) - df["mean_length_tokens"] = df["number_of_tokens"] / df["number_of_samples"] - df["mean_length_characters"] = df["number_of_characters"] / df["number_of_samples"] - return df - - -def plot_dataset_size(df: pd.DataFrame) -> go.Figure: - """Plot dataset size using a range plot with min, max, and mean token lengths.""" - # Calculate mean token length per document - df["mean_length_tokens"] = df["number_of_tokens"] / df["number_of_samples"] - - # Create the range plot - fig = go.Figure() - - # Add range bars (from min to max) - for i, row in df.iterrows(): - fig.add_trace( - go.Scatter( - x=[row["min_length_tokens"], row["max_length_tokens"]], - y=[row["dataset_name"], row["dataset_name"]], - mode="lines", - line=dict(color="lightgray", width=3), - showlegend=False, - hoverinfo="skip", - ) - ) - - # Add min points - fig.add_trace( - go.Scatter( - x=df["min_length_tokens"], - y=df["dataset_name"], - mode="markers", - marker=dict(color="lightblue", size=6, symbol="circle"), - name="Min tokens", - hovertemplate="%{y}
Min: %{x:,} tokens", - ) - ) - - # Add max points - fig.add_trace( - go.Scatter( - x=df["max_length_tokens"], - y=df["dataset_name"], - mode="markers", - marker=dict(color="darkred", size=6, symbol="circle"), - name="Max tokens", - hovertemplate="%{y}
Max: %{x:,} tokens", - ) - ) - - # Add mean points - fig.add_trace( - go.Scatter( - x=df["mean_length_tokens"], - y=df["dataset_name"], - mode="markers", - marker=dict(color="orange", size=8, symbol="diamond"), - name="Mean tokens", - hovertemplate="%{y}
Mean: %{x:,.0f} tokens", - ) - ) - - fig.update_layout( - title="Token Length Distribution by Dataset
Range (min-max) with mean values", - xaxis_title="Number of Tokens (log scale)", - xaxis_type="log", - yaxis_title="Dataset", - height=len(df["dataset_name"]) * 20, # Scaling based on number of datasets - template="plotly_white", - margin=dict(l=120), # More space for dataset names - yaxis=dict( - tickmode="array", - tickvals=df["dataset_name"], - ticktext=df["pretty_name"], - categoryorder="array", # keep dataset order - categoryarray=df["dataset_name"].tolist(), - range=[-0.5, len(df["dataset_name"]) - 0.5], # <-- fixes top/bottom padding - ), - ) - - return fig - - -def create_dataset_size_plot() -> None: - logger.info("Creating range plot of dataset sizes using `descriptive_stats.json`.") - df = _create_descriptive_stats_table() - fig = plot_dataset_size(df) - - save_path = repo_path / "images" / "dataset_size_plot.html" - save_path_svg = repo_path / "images" / "dataset_size_plot.svg" - - logger.info(f"Saving dataset size plot to {save_path} and {save_path_svg}.") - save_path.parent.mkdir(parents=True, exist_ok=True) - fig.write_html(save_path) - fig.write_image(save_path_svg) - - -if __name__ == "__main__": - create_dataset_size_plot() diff --git a/src/dynaword/process_dataset.py b/src/dynaword/process_dataset.py deleted file mode 100644 index af8ba11580fda6c07a9852098c6cc399316cb78c..0000000000000000000000000000000000000000 --- a/src/dynaword/process_dataset.py +++ /dev/null @@ -1,74 +0,0 @@ -"""""" - -import logging -from functools import partial -from typing import Any - -from datasets import Dataset -from transformers import AutoTokenizer - -from dynaword.dataset_structure import COLUMN_ORDER, ColumnNames - -logger = logging.getLogger(__name__) - -# TODO: Add a step to compute the size categories and update the frontmatter - - -def _tokenize_function( - examples: dict[str, Any], tokenizer: AutoTokenizer -) -> dict[str, Any]: - encodings = tokenizer( - examples["text"], - padding=False, - truncation=False, - return_length=True, # much faster, avoids storing all IDs - ) # type: ignore - return {"token_count": encodings["length"]} - - -def add_token_count( - ds: Dataset, - tokenizer_name: str = "AI-Sweden-Models/Llama-3-8B-instruct", - num_proc: int = 4, -) -> Dataset: - tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, use_fast=True) - - tokenize = partial(_tokenize_function, tokenizer=tokenizer) # type: ignore - - ds = ds.map(tokenize, batched=True, num_proc=num_proc) - return ds - - -def _filter_duplicates(example: dict[str, Any], seen_set: set) -> bool: - if example[ColumnNames.text.value] in seen_set: - return False - seen_set.add(example[ColumnNames.text.value]) - return True - - -def remove_duplicate_text(ds: Dataset) -> Dataset: - logger.info("Removing duplicate texts") - seen_texts = set() - len_ds = len(ds) - ds = ds.filter(partial(_filter_duplicates, seen_set=seen_texts)) - logger.info(f"Filtered {len_ds - len(ds)} duplicate examples") - return ds - - -def _filter_empty(example: dict[str, Any]) -> bool: - return len(example[ColumnNames.text.value].strip()) > 0 - - -def remove_empty_texts(ds: Dataset, num_proc: int = 4) -> Dataset: - logger.info("Removing empty texts") - len_ds = len(ds) - ds = ds.filter(_filter_empty, num_proc=num_proc) - logger.info(f"Filtered {len_ds - len(ds)} empty examples") - - return ds - - -def ensure_column_order(ds: Dataset) -> Dataset: - logger.info("Ensuring columns are in the correct order and are present") - ds = ds.select_columns(COLUMN_ORDER) - return ds diff --git a/src/dynaword/tables.py b/src/dynaword/tables.py deleted file mode 
100644 index 5a9a2c5f440747f14a060844a7a73b894a31b2e0..0000000000000000000000000000000000000000 --- a/src/dynaword/tables.py +++ /dev/null @@ -1,212 +0,0 @@ -from pathlib import Path -from typing import Literal - -import pandas as pd - -from dynaword.datasheet import DataSheet, convert_to_human_readable -from dynaword.paths import repo_path - -main_sheet = DataSheet.load_from_path(repo_path / "README.md") -_datasets = [ - cfg["config_name"] # type: ignore - for cfg in main_sheet.frontmatter["configs"] # type: ignore - if cfg["config_name"] != "default" # type: ignore -] - -DEFAULT_LICENSE_REFERENCES = """[CC-0]: https://creativecommons.org/publicdomain/zero/1.0/legalcode.en -[CC-BY-SA 4.0]: https://creativecommons.org/licenses/by-sa/4.0/deed.en -[CC-BY 4.0]: https://creativecommons.org/licenses/by/4.0/deed.en -[Apache 2.0]: https://www.apache.org/licenses/LICENSE-2.0 -""" - - -def create_license_references() -> str: - license_references = DEFAULT_LICENSE_REFERENCES - for dataset in _datasets: - dataset_path = repo_path / "data" / dataset - readme_path = dataset_path / f"{dataset_path.name}.md" - - sheet = DataSheet.load_from_path(readme_path) - - if sheet.license == "other": - license_name = sheet.frontmatter["license_name"] - license_references += f"[{license_name}]: ./data/{dataset_path.name}/{dataset_path.name}.md#license-information\n" - - return license_references - - -def create_dataset_readme_references(): - readme_references = "" - - for dataset in _datasets: - dataset_path = repo_path / "data" / dataset - - readme_references += ( - f"[{dataset_path.name}]: data/{dataset_path.name}/{dataset_path.name}.md\n" - ) - return readme_references - - -def create_overview_table( - repo_path: Path = repo_path, - add_readable_tokens: bool = True, - add_total_row: bool = True, - add_readme_references: bool = True, -) -> pd.DataFrame: - table = { - "Source": [], - "Sources": [], - "Description": [], - "Domain": [], - "N. Tokens": [], - "License": [], - } - - for dataset in _datasets: - dataset_path = repo_path / "data" / dataset - readme_path = dataset_path / f"{dataset_path.name}.md" - - sheet = DataSheet.load_from_path(readme_path) - desc_stats = sheet.get_descritive_stats() - main_domain = sheet.domains[0] if sheet.domains else "" - - table["Source"] += [f"{dataset_path.name}"] - table["Sources"] += [f"[{dataset_path.name}]"] - table["License"] += [f"[{sheet.license_name}]"] - table["Domain"] += [main_domain] - table["Description"] += [sheet.short_description] - table["N. Tokens"] += [desc_stats.number_of_tokens] - - df = pd.DataFrame.from_dict(table) - df = df.sort_values("N. Tokens", ascending=False) - - if add_total_row: - total_row = { - "Source": "**Total**", - "Sources": "**Total**", - "Domain": "", - "License": "", - "Description": "", - "N. Tokens": sum(table["N. Tokens"]), - } - df = pd.concat( - [ - df, - pd.DataFrame([total_row]), - ], - ignore_index=True, - ) - if add_readme_references: - # replace Source with Sources - df["Source"] = df["Sources"] - df = df.drop(columns=["Sources"]) - else: - # remove Sources - df = df.drop(columns=["Sources"]) - - if add_readable_tokens: - df["N. Tokens"] = df["N. 
Tokens"].apply(convert_to_human_readable) - - return df - - -def _get_normalized_license(ds: DataSheet) -> str: - non_standard_license_names = { - "Apache 2.0": "Other (Attribution required)", - "NLOD 2.0": "Other (Attribution required)", - "DanNet 1.0": "Other (Attribution required)", - "Gutenberg": "Other (Attribution required)", - "Danish Copyright Law": "Other (No attribution required)", - } - if ( - ds.license_name not in non_standard_license_names - and ds.license_name is not None - ): - return ds.license_name - if ds.license_name is None: - raise ValueError( - f"Datasheet {ds.pretty_name} has no license name specified in the frontmatter." - ) - return non_standard_license_names[ds.license_name] - - -def _get_feature_by_string( - datasheet: DataSheet, feature_name: Literal["Domain", "Language", "License"] -) -> str: - """Get a specific feature from the frontmatter.""" - - match feature_name: - case "Domain": - return datasheet.domains[0] if datasheet.domains else "N/A" - case "Language": - return ", ".join(datasheet.language) - case "License": - return _get_normalized_license(datasheet) - case _: - raise ValueError(f"Unknown feature: {feature_name}") - - -def create_grouped_table( - group: Literal["Domain", "Language", "License"] = "Domain", - repo_path: Path = repo_path, - add_readable_tokens: bool = True, - add_total_row: bool = True, -) -> pd.DataFrame: - table = { - "Sources": [], - group: [], - "N. Tokens": [], - } - - for dataset in _datasets: - dataset_path = repo_path / "data" / dataset - readme_path = dataset_path / f"{dataset_path.name}.md" - - sheet = DataSheet.load_from_path(readme_path) - desc_stats = sheet.get_descritive_stats() - feature = _get_feature_by_string(sheet, group) - - table["Sources"] += [f"[{dataset_path.name}]"] - table[group] += [feature] - table["N. Tokens"] += [desc_stats.number_of_tokens] - - if add_total_row: - table["Sources"] += [""] - table[group] += ["**Total**"] - table["N. Tokens"] += [sum(table["N. Tokens"])] - - df = pd.DataFrame.from_dict(table) - - df = df.groupby(group).agg({"Sources": lambda x: ", ".join(x), "N. Tokens": "sum"}) - - df = df.sort_values("N. Tokens", ascending=False) - - df.index.name = group - df = df.reset_index() - - # Trick the Total row to be at the bottom. - new_index = list(df.index.drop(0)) + [0] - df = df.reindex(new_index) - - if add_readable_tokens: - df["N. Tokens"] = df["N. 
Tokens"].apply(convert_to_human_readable) - - return df - - -def create_grouped_table_str( - repo_path: Path = repo_path, - group: Literal["Domain", "Language", "License"] = "Domain", -) -> str: - table = create_grouped_table(group=group, repo_path=repo_path) - readme_references = create_dataset_readme_references() - package = f"{table.to_markdown(index=False, maxcolwidths=[None, None, None])}\n\n{readme_references}\n\n" - return package - - -def create_overview_table_str(repo_path: Path = repo_path) -> str: - main_table = create_overview_table(repo_path) - readme_references = create_dataset_readme_references() - license_references = create_license_references() - package = f"{main_table.to_markdown(index=False)}\n\n{readme_references}\n\n{license_references}\n\n" - return package diff --git a/src/dynaword/typings.py b/src/dynaword/typings.py deleted file mode 100644 index aa7a1d398eb4dff237aa0c18101f6e2fc5f3cc71..0000000000000000000000000000000000000000 --- a/src/dynaword/typings.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Literal - -DOMAIN = Literal[ - "Books", - "Conversation", - "Dialect", - "Encyclopedic", - "Legal", - "Medical", - "News", - "Other", - "Readaloud", - "Social Media", - "Speeches", - "Spoken", - "Subtitles", - "Web", -] - -LICENSE = Literal["cc0-1.0", "other", "cc-by-sa-4.0", "apache-2.0", "cc-by-4.0"] - -LICENSE_NAMES_MAPPING = { - "cc0-1.0": "CC0", - "cc-by-sa-4.0": "CC BY-SA 4.0", - "cc-by-4.0": "CC-BY 4.0", - "apache-2.0": "Apache 2.0", -} diff --git a/src/dynaword/update_descriptive_statistics.py b/src/dynaword/update_descriptive_statistics.py deleted file mode 100644 index 99618452711c740dfee6c2b9287f90a8080fc2c1..0000000000000000000000000000000000000000 --- a/src/dynaword/update_descriptive_statistics.py +++ /dev/null @@ -1,170 +0,0 @@ -""" -A simple CLI to updates descriptive statistics on all datasets. - -Example use: - - uv run src/dynaword/update_descriptive_statistics.py --dataset wikisource - -""" - -import argparse -import logging -from pathlib import Path -from typing import cast - -import plotly.express as px -from datasets import Dataset, load_dataset - -from dynaword.datasheet import DataSheet -from dynaword.descriptive_stats import DescriptiveStatsOverview -from dynaword.paths import repo_path -from dynaword.plots.plot_tokens_over_time import create_tokens_over_time_plot -from dynaword.plots.plots_dataset_size import create_dataset_size_plot -from dynaword.tables import ( - create_grouped_table_str, - create_overview_table, - create_overview_table_str, -) - -logger = logging.getLogger(__name__) - -main_sheet = DataSheet.load_from_path(repo_path / "README.md") -_datasets = [ - cfg["config_name"] # type: ignore - for cfg in main_sheet.frontmatter["configs"] # type: ignore - if cfg["config_name"] != "default" # type: ignore -] - - -logger = logging.getLogger(__name__) - - -def create_domain_distribution_plot( - save_dir: Path = repo_path, -): - df = create_overview_table( - add_readable_tokens=False, add_total_row=False, add_readme_references=False - ) - fig = px.sunburst(df, path=["Domain", "Source"], values="N. 
Tokens") - - fig.update_traces(textinfo="label+percent entry") - fig.update_layout(title="Dataset Distribution by Domain and Source") - - img_path = save_dir / "images" - img_path.mkdir(parents=False, exist_ok=True) - save_path = img_path / "domain_distribution.png" - fig.write_image( - save_path, - width=800, - height=800, - scale=2, - ) - - -def update_dataset( - dataset_name: str, - force: bool = False, -) -> None: - dataset_path = ( - repo_path / "data" / dataset_name if dataset_name != "default" else repo_path - ) - - if dataset_name == "default": - readme_name = "README.md" - else: - readme_name = f"{dataset_name}.md" - - desc_stats_path = dataset_path / "descriptive_stats.json" - markdown_path = dataset_path / readme_name - - if desc_stats_path.exists() and force is False: - logger.info( - f"descriptive statistics for '{dataset_name}' is already exists (``{desc_stats_path}``), skipping." - ) - return - - logger.info(f"Updating datasheet for: {dataset_name}") - sheet = DataSheet.load_from_path(markdown_path) - - if dataset_name != "default": - ds = load_dataset(str(repo_path), dataset_name, split="train") - ds = cast(Dataset, ds) - desc_stats = DescriptiveStatsOverview.from_dataset(ds) - sheet.body = sheet.add_dataset_plots(ds, create_plot=True) - else: - # compute descriptive stats from existing files - desc_paths = (repo_path / "data").glob("**/*descriptive_stats.json") - _desc_stats = [DescriptiveStatsOverview.from_disk(p) for p in desc_paths] - desc_stats = sum(_desc_stats[1:], start=_desc_stats[0]) - desc_stats.to_disk(desc_stats_path) - - sheet.body = sheet.add_descriptive_stats(descriptive_stats=desc_stats) - sheet.body = sheet.add_sample_and_description() - - if dataset_name == "default": - logger.info("Updating Overview table") - overview_table = create_overview_table_str() - sheet.body = sheet.replace_tag(package=overview_table, tag="MAIN TABLE") - logger.info("Updating domain table") - domain_table = create_grouped_table_str(group="Domain") - sheet.body = sheet.replace_tag(package=domain_table, tag="DOMAIN TABLE") - logger.info("Updating license table") - domain_table = create_grouped_table_str(group="License") - sheet.body = sheet.replace_tag(package=domain_table, tag="LICENSE TABLE") - create_domain_distribution_plot() - create_tokens_over_time_plot() - create_dataset_size_plot() - - sheet.write_to_path() - - -def create_parser(): - parser = argparse.ArgumentParser( - description="Calculated descriptive statistics of the datasets in tha data folder" - ) - parser.add_argument( - "--dataset", - default=None, - type=str, - help="Use to specify if you only want to compute the statistics from a singular dataset.", - ) - parser.add_argument( - "--logging_level", - default=20, - type=int, - help="Sets the logging level. Default to 20 (INFO), other reasonable levels are 10 (DEBUG) and 30 (WARNING).", - ) - parser.add_argument( - "--force", - type=bool, - default=False, - action=argparse.BooleanOptionalAction, - help="Should the statistics be forcefully recomputed. 
- - -def create_parser(): - parser = argparse.ArgumentParser( - description="Calculates descriptive statistics of the datasets in the data folder" - ) - parser.add_argument( - "--dataset", - default=None, - type=str, - help="Use to specify if you only want to compute the statistics for a single dataset.", - ) - parser.add_argument( - "--logging_level", - default=20, - type=int, - help="Sets the logging level. Defaults to 20 (INFO); other reasonable levels are 10 (DEBUG) and 30 (WARNING).", - ) - parser.add_argument( - "--force", - type=bool, - default=False, - action=argparse.BooleanOptionalAction, - help="Should the statistics be forcefully recomputed. By default it checks the difference in commit ids.", - ) - return parser - - -def main( - dataset: str | None = None, - logging_level: int = 20, - force: bool = False, -) -> None: - logging.basicConfig(level=logging_level) - - if dataset: - update_dataset(dataset, force=force) - else: - for dataset_name in _datasets: - update_dataset(dataset_name, force=force) - update_dataset("default", force=force) - - -if __name__ == "__main__": - parser = create_parser() - args = parser.parse_args() - - main( - args.dataset, - logging_level=args.logging_level, - force=args.force, - ) diff --git a/src/tests/__init__.py b/src/tests/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/src/tests/conftest.py b/src/tests/conftest.py deleted file mode 100644 index a377e968d80103e701bef19179eaef52212de8fe..0000000000000000000000000000000000000000 --- a/src/tests/conftest.py +++ /dev/null @@ -1,14 +0,0 @@ -from pathlib import Path - -from dynaword.datasheet import DataSheet - -root_path = Path(__file__).parent.parent.parent -main_readme = root_path / "README.md" - -main_sheet = DataSheet.load_from_path(main_readme) - -DATASET_NAMES = [ - cfg["config_name"] - for cfg in main_sheet.frontmatter["configs"] - if cfg["config_name"] != "default" -] diff --git a/src/tests/test_dataset_schema.py b/src/tests/test_dataset_schema.py deleted file mode 100644 index 7742e593611eac548a904363d827e1f5867be957..0000000000000000000000000000000000000000 --- a/src/tests/test_dataset_schema.py +++ /dev/null @@ -1,37 +0,0 @@ -import pytest -from datasets import load_dataset - -from dynaword.dataset_structure import SampleSchema -from dynaword.paths import repo_path - -from .conftest import DATASET_NAMES - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_sample_schema(dataset_name: str): - """Ensure that the dataset samples follow the correct schema""" - - ds = load_dataset( - str(repo_path.resolve()), dataset_name, split="train", streaming=True - ) - sample = next(iter(ds)) - SampleSchema(**sample) - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_dataset_folder_structure(dataset_name: str): - """Tests that the dataset folder structure is as follows. - - dataset_name - |- dataset_name.md - |- dataset_name.parquet - - If there is a python file, there should at least be one called `create.py`, but there can be additional ones.
- """ - path = repo_path / "data" / dataset_name - - assert (path / f"{path.name}.parquet").exists() - assert (path / f"{path.name}.md").exists() - - if any(p.name.endswith(".py") for p in path.glob("*")): - assert (path / "create.py").exists() diff --git a/src/tests/test_datasheets.py b/src/tests/test_datasheets.py deleted file mode 100644 index 4e6a74af5de8dc60bca0f35a41ff069b724b41c3..0000000000000000000000000000000000000000 --- a/src/tests/test_datasheets.py +++ /dev/null @@ -1,56 +0,0 @@ -import pytest - -from dynaword.datasheet import DEFAULT_SECTION_TAGS, DataSheet -from dynaword.paths import repo_path - -from .conftest import DATASET_NAMES - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_datasheet_load(dataset_name: str): - """tests that the dataset frontmatter and markdown follows the correct format.""" - - readme = repo_path / "data" / dataset_name / f"{dataset_name}.md" - ds_sheet = DataSheet.load_from_path( # noqa: F841 - readme - ) # will fail if format is not correct - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_datasheet_content_tags(dataset_name: str): - readme = repo_path / "data" / dataset_name / f"{dataset_name}.md" - ds_sheet = DataSheet.load_from_path(readme) - - # ensure tags: - tags = [v.value for v in DEFAULT_SECTION_TAGS] - for tag in tags: - ds_sheet.get_tag_idx(tag) - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_datasheet_license_info(dataset_name: str): - """Ensure that license information is present is license is other""" - readme = repo_path / "data" / dataset_name / f"{dataset_name}.md" - ds_sheet = DataSheet.load_from_path(readme) - - if ds_sheet.license == "other": # ensure description of underspecified licenses - assert ds_sheet.license_information.strip() - assert ds_sheet.license_name - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_datasheet_required_headings(dataset_name: str): - readme = repo_path / "data" / dataset_name / f"{dataset_name}.md" - ds_sheet = DataSheet.load_from_path(readme) - - req_h2_headings = ["## Dataset Description", "## Additional Information"] - for req_h2 in req_h2_headings: - assert ds_sheet.get_section_by_header(req_h2) - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_domains_in_frontmatter(dataset_name: str): - readme = repo_path / "data" / dataset_name / f"{dataset_name}.md" - ds_sheet = DataSheet.load_from_path(readme) - - assert ds_sheet.domains, "domains annotations are missing" diff --git a/src/tests/test_load.py b/src/tests/test_load.py deleted file mode 100644 index 1a2df22b4936fd84690b26a3de5faab927c0d54d..0000000000000000000000000000000000000000 --- a/src/tests/test_load.py +++ /dev/null @@ -1,33 +0,0 @@ -from datasets import load_dataset - -from dynaword.datasheet import DataSheet -from dynaword.paths import repo_path - -REMOVED_DATA = [ - "lexdk" -] # data that has been removed due to legal disputes, question about legality, or similar - - -def test_dataset_loads(): - """Ensures that the dataset can load as intended""" - name = str(repo_path.resolve()) - ds = load_dataset(name, split="train", streaming=True) - sample = next(iter(ds)) - assert isinstance(sample, dict) - - -def test_all_datasets_in_yaml(): - ds_sheet = DataSheet.load_from_path(repo_path / "README.md") - - ds_names = { - cfg["config_name"] - for cfg in ds_sheet.frontmatter["configs"] - if cfg["config_name"] != "default" - } - - data_folder = repo_path / "data" - datasets = data_folder.glob("*") - - for dataset in datasets: - if 
- if dataset.name not in REMOVED_DATA: - assert dataset.name in ds_names diff --git a/src/tests/test_quality/__init__.py b/src/tests/test_quality/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/src/tests/test_quality/test_duplicates.py b/src/tests/test_quality/test_duplicates.py deleted file mode 100644 index 598cbe19107f28a4c10a848fe3e92a556448cbe5..0000000000000000000000000000000000000000 --- a/src/tests/test_quality/test_duplicates.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import cast - -import pytest -from datasets import Dataset, load_dataset - -from dynaword.paths import repo_path -from ..conftest import DATASET_NAMES - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -def test_no_within_data_duplicates(dataset_name: str): - ds = load_dataset(str(repo_path.resolve()), dataset_name, split="train") - ds = cast(Dataset, ds) - - assert len(set(ds["text"])) == len(ds) - - -@pytest.mark.skip( - "This test takes too long to run" -) # there seem to be some duplicates across datasets -def test_no_data_duplicates(): - ds = load_dataset(str(repo_path.resolve()), split="train") - ds = cast(Dataset, ds) - - assert len(set(ds["text"])) == len(ds) diff --git a/src/tests/test_quality/test_short_texts.py b/src/tests/test_quality/test_short_texts.py deleted file mode 100644 index 96821ca7ef2c9ed9520676ec1e8f9793256a7260..0000000000000000000000000000000000000000 --- a/src/tests/test_quality/test_short_texts.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import cast - -import pytest -from datasets import Dataset, load_dataset - -from dynaword.paths import repo_path - -from ..conftest import DATASET_NAMES - - -@pytest.mark.parametrize("dataset_name", DATASET_NAMES) -# @pytest.mark.skip("This test currently fails") -def test_no_one_word_documents(dataset_name: str): - ds = load_dataset(str(repo_path.resolve()), dataset_name, split="train") - ds = cast(Dataset, ds) - - one_word_docs = ds.filter(lambda x: x["token_count"] <= 1) - - assert ( - len(one_word_docs) == 0 - ), f"Found {len(one_word_docs)} one-word documents in dataset '{dataset_name}'" diff --git a/src/tests/test_unique_ids.py b/src/tests/test_unique_ids.py deleted file mode 100644 index 2fdf20f484821e9f835f7a4d5c46cd136a527f88..0000000000000000000000000000000000000000 --- a/src/tests/test_unique_ids.py +++ /dev/null @@ -1,15 +0,0 @@ -from collections import Counter -from typing import cast - -from datasets import Dataset, load_dataset - -from dynaword.paths import repo_path - - -def test_ensure_ids_are_unique(): - name = str(repo_path.resolve()) - ds = load_dataset(name, split="train") - ds = cast(Dataset, ds) - counter = Counter(ds["id"]) - duplicates = [item for item, count in counter.items() if count > 1] - assert len(duplicates) == 0, f"Duplicate IDs found: {duplicates}" diff --git a/test_results.log b/test_results.log deleted file mode 100644 index 0b28ad1167956aac495666383ba9a150facce489..0000000000000000000000000000000000000000 --- a/test_results.log +++ /dev/null @@ -1,25 +0,0 @@ -============================= test session starts ============================== -platform linux -- Python 3.12.3, pytest-8.3.4, pluggy-1.5.0 -rootdir: /home/l_rahbek/danish-dynaword -configfile: pyproject.toml -collected 364 items - -src/tests/test_dataset_schema.py ....................................... [ 10%] -......................................... [ 21%] -src/tests/test_datasheets.py ........................................... 
[ 33%] -........................................................................ [ 53%] -........................................................................ [ 73%] -............. [ 76%] -src/tests/test_load.py .. [ 77%] -src/tests/test_quality/test_duplicates.py .............................. [ 85%] -..........s [ 88%] -src/tests/test_quality/test_short_texts.py ............................. [ 96%] -........... [ 99%] -src/tests/test_unique_ids.py . [100%] - -=============================== warnings summary =============================== -src/tests/test_quality/test_short_texts.py: 40 warnings - /home/l_rahbek/danish-dynaword/.venv/lib/python3.12/site-packages/datasets/utils/_dill.py:385: DeprecationWarning: co_lnotab is deprecated, use co_lines instead. - --- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html -============ 363 passed, 1 skipped, 40 warnings in 76.69s (0:01:16) ============ diff --git a/src/dynaword/__init__.py b/tests/__init__.py similarity index 100% rename from src/dynaword/__init__.py rename to tests/__init__.py diff --git a/tests/test_load.py b/tests/test_load.py new file mode 100644 index 0000000000000000000000000000000000000000..b492c2fe9ee2678e18c34bec632fa91b15b8c2d2 --- /dev/null +++ b/tests/test_load.py @@ -0,0 +1,9 @@ +from datasets import load_dataset +from pathlib import Path + +def test_dataset_loads(): + repo = Path(__file__).parent.parent + name = str(repo.resolve()) # "danish-foundation-models/danish-gigaword-2" + ds = load_dataset(name, split="train", streaming=True) + sample = next(iter(ds)) + assert isinstance(sample, dict) diff --git a/tests/test_unique_ids.py b/tests/test_unique_ids.py new file mode 100644 index 0000000000000000000000000000000000000000..e94245d5485aa00c9e8c53f4ea6182560c7b90a8 --- /dev/null +++ b/tests/test_unique_ids.py @@ -0,0 +1,12 @@ +from pathlib import Path +from typing import cast + +from datasets import Dataset, load_dataset + + +def test_ensure_ids_are_unique(): + repo = Path(__file__).parent.parent + name = str(repo.resolve()) # "danish-foundation-models/danish-gigaword-2" + ds = load_dataset(name, split="train") + ds = cast(Dataset, ds) + assert len(set(ds["id"])) == len(ds) diff --git a/uv.lock b/uv.lock index 2096275bcfd2dc78c48026b837d70e3bb1118474..cb76cb04aa226110b02f03dfca5e2a0064b0fbfa 100644 --- a/uv.lock +++ b/uv.lock @@ -1,24 +1,18 @@ version = 1 -revision = 3 -requires-python = "==3.12.*" -resolution-markers = [ - "sys_platform == 'darwin'", - "platform_machine == 'aarch64' and sys_platform == 'linux'", - "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')", -] +requires-python = ">=3.13" [[package]] name = "aiohappyeyeballs" version = "2.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/55/e4373e888fdacb15563ef6fa9fa8c8252476ea071e96fb46defac9f18bf2/aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", size = 21977, upload-time = "2024-11-30T18:44:00.701Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/55/e4373e888fdacb15563ef6fa9fa8c8252476ea071e96fb46defac9f18bf2/aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", size = 21977 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/74/fbb6559de3607b3300b9be3cc64e97548d55678e44623db17820dbd20002/aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = 
"sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8", size = 14756, upload-time = "2024-11-30T18:43:39.849Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/fbb6559de3607b3300b9be3cc64e97548d55678e44623db17820dbd20002/aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8", size = 14756 }, ] [[package]] name = "aiohttp" -version = "3.11.11" +version = "3.11.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -29,23 +23,23 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/ed/f26db39d29cd3cb2f5a3374304c713fe5ab5a0e4c8ee25a0c45cc6adf844/aiohttp-3.11.11.tar.gz", hash = "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e", size = 7669618, upload-time = "2024-12-18T21:20:50.191Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/c4/3b5a937b16f6c2a0ada842a9066aad0b7a5708427d4a202a07bf09c67cbb/aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e", size = 7668832 } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/cf/4bda538c502f9738d6b95ada11603c05ec260807246e15e869fc3ec5de97/aiohttp-3.11.11-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886", size = 704666, upload-time = "2024-12-18T21:18:49.254Z" }, - { url = "https://files.pythonhosted.org/packages/46/7b/87fcef2cad2fad420ca77bef981e815df6904047d0a1bd6aeded1b0d1d66/aiohttp-3.11.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2", size = 464057, upload-time = "2024-12-18T21:18:51.375Z" }, - { url = "https://files.pythonhosted.org/packages/5a/a6/789e1f17a1b6f4a38939fbc39d29e1d960d5f89f73d0629a939410171bc0/aiohttp-3.11.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c", size = 455996, upload-time = "2024-12-18T21:18:53.11Z" }, - { url = "https://files.pythonhosted.org/packages/b7/dd/485061fbfef33165ce7320db36e530cd7116ee1098e9c3774d15a732b3fd/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a", size = 1682367, upload-time = "2024-12-18T21:18:55.053Z" }, - { url = "https://files.pythonhosted.org/packages/e9/d7/9ec5b3ea9ae215c311d88b2093e8da17e67b8856673e4166c994e117ee3e/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231", size = 1736989, upload-time = "2024-12-18T21:18:56.933Z" }, - { url = "https://files.pythonhosted.org/packages/d6/fb/ea94927f7bfe1d86178c9d3e0a8c54f651a0a655214cce930b3c679b8f64/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e", size = 1793265, upload-time = "2024-12-18T21:19:00.174Z" }, - { url = "https://files.pythonhosted.org/packages/40/7f/6de218084f9b653026bd7063cd8045123a7ba90c25176465f266976d8c82/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8", size = 1691841, upload-time = "2024-12-18T21:19:02.3Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/e2/992f43d87831cbddb6b09c57ab55499332f60ad6fdbf438ff4419c2925fc/aiohttp-3.11.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8", size = 1619317, upload-time = "2024-12-18T21:19:04.33Z" }, - { url = "https://files.pythonhosted.org/packages/96/74/879b23cdd816db4133325a201287c95bef4ce669acde37f8f1b8669e1755/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c", size = 1641416, upload-time = "2024-12-18T21:19:09.842Z" }, - { url = "https://files.pythonhosted.org/packages/30/98/b123f6b15d87c54e58fd7ae3558ff594f898d7f30a90899718f3215ad328/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab", size = 1646514, upload-time = "2024-12-18T21:19:12.154Z" }, - { url = "https://files.pythonhosted.org/packages/d7/38/257fda3dc99d6978ab943141d5165ec74fd4b4164baa15e9c66fa21da86b/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da", size = 1702095, upload-time = "2024-12-18T21:19:15.51Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f4/ddab089053f9fb96654df5505c0a69bde093214b3c3454f6bfdb1845f558/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853", size = 1734611, upload-time = "2024-12-18T21:19:18.849Z" }, - { url = "https://files.pythonhosted.org/packages/c3/d6/f30b2bc520c38c8aa4657ed953186e535ae84abe55c08d0f70acd72ff577/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e", size = 1694576, upload-time = "2024-12-18T21:19:21.257Z" }, - { url = "https://files.pythonhosted.org/packages/bc/97/b0a88c3f4c6d0020b34045ee6d954058abc870814f6e310c4c9b74254116/aiohttp-3.11.11-cp312-cp312-win32.whl", hash = "sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600", size = 411363, upload-time = "2024-12-18T21:19:23.122Z" }, - { url = "https://files.pythonhosted.org/packages/7f/23/cc36d9c398980acaeeb443100f0216f50a7cfe20c67a9fd0a2f1a5a846de/aiohttp-3.11.11-cp312-cp312-win_amd64.whl", hash = "sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d", size = 437666, upload-time = "2024-12-18T21:19:26.425Z" }, + { url = "https://files.pythonhosted.org/packages/8c/1d/88bfdbe28a3d1ba5b94a235f188f27726caf8ade9a0e13574848f44fe0fe/aiohttp-3.11.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816", size = 697755 }, + { url = "https://files.pythonhosted.org/packages/86/00/4c4619d6fe5c5be32f74d1422fc719b3e6cd7097af0c9e03877ca9bd4ebc/aiohttp-3.11.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf", size = 460440 }, + { url = "https://files.pythonhosted.org/packages/aa/1c/2f927408f50593a29465d198ec3c57c835c8602330233163e8d89c1093db/aiohttp-3.11.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5", size = 452726 }, + { url = "https://files.pythonhosted.org/packages/06/6a/ff00ed0a2ba45c34b3c366aa5b0004b1a4adcec5a9b5f67dd0648ee1c88a/aiohttp-3.11.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32", size = 1664944 }, + { url = "https://files.pythonhosted.org/packages/02/c2/61923f2a7c2e14d7424b3a526e054f0358f57ccdf5573d4d3d033b01921a/aiohttp-3.11.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01", size = 1717707 }, + { url = "https://files.pythonhosted.org/packages/8a/08/0d3d074b24d377569ec89d476a95ca918443099c0401bb31b331104e35d1/aiohttp-3.11.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34", size = 1774890 }, + { url = "https://files.pythonhosted.org/packages/e8/49/052ada2b6e90ed65f0e6a7e548614621b5f8dcd193cb9415d2e6bcecc94a/aiohttp-3.11.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99", size = 1676945 }, + { url = "https://files.pythonhosted.org/packages/7c/9e/0c48e1a48e072a869b8b5e3920c9f6a8092861524a4a6f159cd7e6fda939/aiohttp-3.11.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39", size = 1602959 }, + { url = "https://files.pythonhosted.org/packages/ab/98/791f979093ff7f67f80344c182cb0ca4c2c60daed397ecaf454cc8d7a5cd/aiohttp-3.11.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e", size = 1618058 }, + { url = "https://files.pythonhosted.org/packages/7b/5d/2d4b05feb3fd68eb7c8335f73c81079b56e582633b91002da695ccb439ef/aiohttp-3.11.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a", size = 1616289 }, + { url = "https://files.pythonhosted.org/packages/50/83/68cc28c00fe681dce6150614f105efe98282da19252cd6e32dfa893bb328/aiohttp-3.11.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542", size = 1685239 }, + { url = "https://files.pythonhosted.org/packages/16/f9/68fc5c8928f63238ce9314f04f3f59d9190a4db924998bb9be99c7aacce8/aiohttp-3.11.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60", size = 1715078 }, + { url = "https://files.pythonhosted.org/packages/3f/e0/3dd3f0451c532c77e35780bafb2b6469a046bc15a6ec2e039475a1d2f161/aiohttp-3.11.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836", size = 1672544 }, + { url = "https://files.pythonhosted.org/packages/a5/b1/3530ab040dd5d7fb016b47115016f9b3a07ea29593b0e07e53dbe06a380c/aiohttp-3.11.10-cp313-cp313-win32.whl", hash = "sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c", size = 409984 }, + { url = "https://files.pythonhosted.org/packages/49/1f/deed34e9fca639a7f873d01150d46925d3e1312051eaa591c1aa1f2e6ddc/aiohttp-3.11.10-cp313-cp313-win_amd64.whl", hash = "sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6", size = 435837 }, ] [[package]] @@ -55,54 +49,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "frozenlist" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 
19424, upload-time = "2024-12-13T17:10:40.86Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597, upload-time = "2024-12-13T17:10:38.469Z" }, -] - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597 }, ] [[package]] name = "appnope" version = "0.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170 } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, + { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321 }, ] [[package]] name = "asttokens" version = "3.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978, upload-time = "2024-11-30T04:30:14.439Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = "2024-11-30T04:30:10.946Z" }, + { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918 }, ] [[package]] name = "attrs" -version = "24.3.0" +version = "24.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984, upload-time = "2024-12-16T06:59:29.899Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397, upload-time = "2024-12-16T06:59:26.977Z" }, + { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, ] [[package]] name = "certifi" -version = "2024.12.14" +version = "2024.8.30" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010, upload-time = "2024-12-14T13:52:38.02Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927, upload-time = "2024-12-14T13:52:36.114Z" }, + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, ] [[package]] @@ -112,50 +97,52 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = 
"sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, ] [[package]] name = "charset-normalizer" -version = "3.4.1" +version = "3.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = 
"sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload-time = "2024-12-24T18:12:35.43Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload-time = "2024-12-24T18:10:38.83Z" }, - { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload-time = "2024-12-24T18:10:44.272Z" }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload-time = "2024-12-24T18:10:45.492Z" }, - { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload-time = "2024-12-24T18:10:47.898Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload-time = "2024-12-24T18:10:50.589Z" }, - { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload-time = "2024-12-24T18:10:52.541Z" }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload-time = "2024-12-24T18:10:53.789Z" }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload-time = "2024-12-24T18:10:55.048Z" }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload-time = "2024-12-24T18:10:57.647Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload-time = "2024-12-24T18:10:59.43Z" }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload-time = "2024-12-24T18:11:00.676Z" }, - { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload-time = "2024-12-24T18:11:01.952Z" }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, upload-time = "2024-12-24T18:11:03.142Z" }, - { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" }, + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = 
"2022-10-25T02:36:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] [[package]] @@ -165,9 +152,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210, upload-time = "2024-03-12T16:53:41.133Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180, upload-time = "2024-03-12T16:53:39.226Z" }, + { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180 }, ] [[package]] @@ -177,27 +164,64 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/c2/fc7193cc5383637ff390a712e88e4ded0452c9fbcf84abe3de5ea3df1866/contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699", size = 13465753, upload-time = "2024-11-12T11:00:59.118Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/37/6b/175f60227d3e7f5f1549fcb374592be311293132207e451c3d7c654c25fb/contourpy-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ffa84be8e0bd33410b17189f7164c3589c229ce5db85798076a3fa136d0e509", size = 271494, upload-time = "2024-11-12T10:54:23.6Z" }, - { url = "https://files.pythonhosted.org/packages/6b/6a/7833cfae2c1e63d1d8875a50fd23371394f540ce809d7383550681a1fa64/contourpy-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805617228ba7e2cbbfb6c503858e626ab528ac2a32a04a2fe88ffaf6b02c32bc", size = 255444, upload-time = "2024-11-12T10:54:28.267Z" }, - { url = "https://files.pythonhosted.org/packages/7f/b3/7859efce66eaca5c14ba7619791b084ed02d868d76b928ff56890d2d059d/contourpy-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade08d343436a94e633db932e7e8407fe7de8083967962b46bdfc1b0ced39454", size = 307628, upload-time = "2024-11-12T10:54:33.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/b2/011415f5e3f0a50b1e285a0bf78eb5d92a4df000553570f0851b6e309076/contourpy-1.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47734d7073fb4590b4a40122b35917cd77be5722d80683b249dac1de266aac80", size = 347271, upload-time = "2024-11-12T10:54:38.816Z" }, - { url = "https://files.pythonhosted.org/packages/84/7d/ef19b1db0f45b151ac78c65127235239a8cf21a59d1ce8507ce03e89a30b/contourpy-1.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ba94a401342fc0f8b948e57d977557fbf4d515f03c67682dd5c6191cb2d16ec", size = 318906, upload-time = "2024-11-12T10:54:44.132Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/99/6794142b90b853a9155316c8f470d2e4821fe6f086b03e372aca848227dd/contourpy-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa874e87e4a647fd2e4f514d5e91c7d493697127beb95e77d2f7561f6905bd9", size = 323622, upload-time = "2024-11-12T10:54:48.788Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0f/37d2c84a900cd8eb54e105f4fa9aebd275e14e266736778bb5dccbf3bbbb/contourpy-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf98051f1045b15c87868dbaea84f92408337d4f81d0e449ee41920ea121d3b", size = 1266699, upload-time = "2024-11-12T10:55:04.016Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8a/deb5e11dc7d9cc8f0f9c8b29d4f062203f3af230ba83c30a6b161a6effc9/contourpy-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61332c87493b00091423e747ea78200659dc09bdf7fd69edd5e98cef5d3e9a8d", size = 1326395, upload-time = "2024-11-12T10:55:20.547Z" }, - { url = "https://files.pythonhosted.org/packages/1a/35/7e267ae7c13aaf12322ccc493531f1e7f2eb8fba2927b9d7a05ff615df7a/contourpy-1.3.1-cp312-cp312-win32.whl", hash = "sha256:e914a8cb05ce5c809dd0fe350cfbb4e881bde5e2a38dc04e3afe1b3e58bd158e", size = 175354, upload-time = "2024-11-12T10:55:24.377Z" }, - { url = "https://files.pythonhosted.org/packages/a1/35/c2de8823211d07e8a79ab018ef03960716c5dff6f4d5bff5af87fd682992/contourpy-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:08d9d449a61cf53033612cb368f3a1b26cd7835d9b8cd326647efe43bca7568d", size = 220971, upload-time = "2024-11-12T10:55:27.971Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/25/c2/fc7193cc5383637ff390a712e88e4ded0452c9fbcf84abe3de5ea3df1866/contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699", size = 13465753 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/e7/de62050dce687c5e96f946a93546910bc67e483fe05324439e329ff36105/contourpy-1.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a761d9ccfc5e2ecd1bf05534eda382aa14c3e4f9205ba5b1684ecfe400716ef2", size = 271548 }, + { url = "https://files.pythonhosted.org/packages/78/4d/c2a09ae014ae984c6bdd29c11e74d3121b25eaa117eca0bb76340efd7e1c/contourpy-1.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:523a8ee12edfa36f6d2a49407f705a6ef4c5098de4f498619787e272de93f2d5", size = 255576 }, + { url = "https://files.pythonhosted.org/packages/ab/8a/915380ee96a5638bda80cd061ccb8e666bfdccea38d5741cb69e6dbd61fc/contourpy-1.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece6df05e2c41bd46776fbc712e0996f7c94e0d0543af1656956d150c4ca7c81", size = 306635 }, + { url = "https://files.pythonhosted.org/packages/29/5c/c83ce09375428298acd4e6582aeb68b1e0d1447f877fa993d9bf6cd3b0a0/contourpy-1.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:573abb30e0e05bf31ed067d2f82500ecfdaec15627a59d63ea2d95714790f5c2", size = 345925 }, + { url = "https://files.pythonhosted.org/packages/29/63/5b52f4a15e80c66c8078a641a3bfacd6e07106835682454647aca1afc852/contourpy-1.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fa36448e6a3a1a9a2ba23c02012c43ed88905ec80163f2ffe2421c7192a5d7", size = 318000 }, + { url = "https://files.pythonhosted.org/packages/9a/e2/30ca086c692691129849198659bf0556d72a757fe2769eb9620a27169296/contourpy-1.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ea9924d28fc5586bf0b42d15f590b10c224117e74409dd7a0be3b62b74a501c", size = 322689 }, + { url = 
"https://files.pythonhosted.org/packages/6b/77/f37812ef700f1f185d348394debf33f22d531e714cf6a35d13d68a7003c7/contourpy-1.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b75aa69cb4d6f137b36f7eb2ace9280cfb60c55dc5f61c731fdf6f037f958a3", size = 1268413 }, + { url = "https://files.pythonhosted.org/packages/3f/6d/ce84e79cdd128542ebeb268f84abb4b093af78e7f8ec504676673d2675bc/contourpy-1.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:041b640d4ec01922083645a94bb3b2e777e6b626788f4095cf21abbe266413c1", size = 1326530 }, + { url = "https://files.pythonhosted.org/packages/72/22/8282f4eae20c73c89bee7a82a19c4e27af9b57bb602ecaa00713d5bdb54d/contourpy-1.3.1-cp313-cp313-win32.whl", hash = "sha256:36987a15e8ace5f58d4d5da9dca82d498c2bbb28dff6e5d04fbfcc35a9cb3a82", size = 175315 }, + { url = "https://files.pythonhosted.org/packages/e3/d5/28bca491f65312b438fbf076589dcde7f6f966b196d900777f5811b9c4e2/contourpy-1.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7895f46d47671fa7ceec40f31fae721da51ad34bdca0bee83e38870b1f47ffd", size = 220987 }, + { url = "https://files.pythonhosted.org/packages/2f/24/a4b285d6adaaf9746e4700932f579f1a7b6f9681109f694cfa233ae75c4e/contourpy-1.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ddeb796389dadcd884c7eb07bd14ef12408aaae358f0e2ae24114d797eede30", size = 285001 }, + { url = "https://files.pythonhosted.org/packages/48/1d/fb49a401b5ca4f06ccf467cd6c4f1fd65767e63c21322b29b04ec40b40b9/contourpy-1.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19c1555a6801c2f084c7ddc1c6e11f02eb6a6016ca1318dd5452ba3f613a1751", size = 268553 }, + { url = "https://files.pythonhosted.org/packages/79/1e/4aef9470d13fd029087388fae750dccb49a50c012a6c8d1d634295caa644/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841ad858cff65c2c04bf93875e384ccb82b654574a6d7f30453a04f04af71342", size = 310386 }, + { url = "https://files.pythonhosted.org/packages/b0/34/910dc706ed70153b60392b5305c708c9810d425bde12499c9184a1100888/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4318af1c925fb9a4fb190559ef3eec206845f63e80fb603d47f2d6d67683901c", size = 349806 }, + { url = "https://files.pythonhosted.org/packages/31/3c/faee6a40d66d7f2a87f7102236bf4780c57990dd7f98e5ff29881b1b1344/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14c102b0eab282427b662cb590f2e9340a9d91a1c297f48729431f2dcd16e14f", size = 321108 }, + { url = "https://files.pythonhosted.org/packages/17/69/390dc9b20dd4bb20585651d7316cc3054b7d4a7b4f8b710b2b698e08968d/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e806338bfeaa006acbdeba0ad681a10be63b26e1b17317bfac3c5d98f36cda", size = 327291 }, + { url = "https://files.pythonhosted.org/packages/ef/74/7030b67c4e941fe1e5424a3d988080e83568030ce0355f7c9fc556455b01/contourpy-1.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4d76d5993a34ef3df5181ba3c92fabb93f1eaa5729504fb03423fcd9f3177242", size = 1263752 }, + { url = "https://files.pythonhosted.org/packages/f0/ed/92d86f183a8615f13f6b9cbfc5d4298a509d6ce433432e21da838b4b63f4/contourpy-1.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:89785bb2a1980c1bd87f0cb1517a71cde374776a5f150936b82580ae6ead44a1", size = 1318403 }, + { url = "https://files.pythonhosted.org/packages/b3/0e/c8e4950c77dcfc897c71d61e56690a0a9df39543d2164040301b5df8e67b/contourpy-1.3.1-cp313-cp313t-win32.whl", hash = 
"sha256:8eb96e79b9f3dcadbad2a3891672f81cdcab7f95b27f28f1c67d75f045b6b4f1", size = 185117 }, + { url = "https://files.pythonhosted.org/packages/c1/31/1ae946f11dfbd229222e6d6ad8e7bd1891d3d48bde5fbf7a0beb9491f8e3/contourpy-1.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:287ccc248c9e0d0566934e7d606201abd74761b5703d804ff3df8935f523d546", size = 236668 }, ] [[package]] name = "cycler" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, + { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321 }, +] + +[[package]] +name = "danish-gigaword-2" +version = "1.0.1" +source = { virtual = "." } +dependencies = [ + { name = "datasets" }, + { name = "ipykernel" }, + { name = "matplotlib" }, + { name = "numpy" }, + { name = "plotnine" }, + { name = "pytest" }, + { name = "seaborn" }, + { name = "toml" }, +] + +[package.metadata] +requires-dist = [ + { name = "datasets", specifier = ">=3.0.0" }, + { name = "ipykernel", specifier = ">=6.29.5" }, + { name = "matplotlib", specifier = ">=3.10.0" }, + { name = "numpy", specifier = ">=2.2.0" }, + { name = "plotnine", specifier = ">=0.14.3" }, + { name = "pytest", specifier = ">=8.3.4" }, + { name = "seaborn", specifier = ">=0.13.2" }, + { name = "toml", specifier = ">=0.10.2" }, ] [[package]] @@ -220,162 +244,108 @@ dependencies = [ { name = "tqdm" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/48/744286c044e2b942d4fa67f92816126522ad1f0675def0ea3264e6242005/datasets-3.2.0.tar.gz", hash = "sha256:9a6e1a356052866b5dbdd9c9eedb000bf3fc43d986e3584d9b028f4976937229", size = 558366, upload-time = "2024-12-10T16:56:38.162Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/48/744286c044e2b942d4fa67f92816126522ad1f0675def0ea3264e6242005/datasets-3.2.0.tar.gz", hash = "sha256:9a6e1a356052866b5dbdd9c9eedb000bf3fc43d986e3584d9b028f4976937229", size = 558366 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/84/0df6c5981f5fc722381662ff8cfbdf8aad64bec875f75d80b55bfef394ce/datasets-3.2.0-py3-none-any.whl", hash = "sha256:f3d2ba2698b7284a4518019658596a6a8bc79f31e51516524249d6c59cf0fe2a", size = 480647, upload-time = "2024-12-10T16:56:34.742Z" }, + { url = "https://files.pythonhosted.org/packages/d7/84/0df6c5981f5fc722381662ff8cfbdf8aad64bec875f75d80b55bfef394ce/datasets-3.2.0-py3-none-any.whl", hash = "sha256:f3d2ba2698b7284a4518019658596a6a8bc79f31e51516524249d6c59cf0fe2a", size = 480647 }, ] [[package]] name = "debugpy" version = "1.8.11" source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/bc/e7/666f4c9b0e24796af50aadc28d36d21c2e01e831a934535f956e09b3650c/debugpy-1.8.11.tar.gz", hash = "sha256:6ad2688b69235c43b020e04fecccdf6a96c8943ca9c2fb340b8adc103c655e57", size = 1640124, upload-time = "2024-12-13T17:21:07.233Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/e7/666f4c9b0e24796af50aadc28d36d21c2e01e831a934535f956e09b3650c/debugpy-1.8.11.tar.gz", hash = "sha256:6ad2688b69235c43b020e04fecccdf6a96c8943ca9c2fb340b8adc103c655e57", size = 1640124 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/ae/2cf26f3111e9d94384d9c01e9d6170188b0aeda15b60a4ac6457f7c8a26f/debugpy-1.8.11-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:84e511a7545d11683d32cdb8f809ef63fc17ea2a00455cc62d0a4dbb4ed1c308", size = 2498756, upload-time = "2024-12-13T17:21:35.856Z" }, - { url = "https://files.pythonhosted.org/packages/b0/16/ec551789d547541a46831a19aa15c147741133da188e7e6acf77510545a7/debugpy-1.8.11-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce291a5aca4985d82875d6779f61375e959208cdf09fcec40001e65fb0a54768", size = 4219136, upload-time = "2024-12-13T17:21:37.526Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/b2b3ce673c55f882d27a6eb04a5f0c68bcad6b742ac08a86d8392ae58030/debugpy-1.8.11-cp312-cp312-win32.whl", hash = "sha256:28e45b3f827d3bf2592f3cf7ae63282e859f3259db44ed2b129093ca0ac7940b", size = 5224440, upload-time = "2024-12-13T17:21:41.033Z" }, - { url = "https://files.pythonhosted.org/packages/77/09/b1f05be802c1caef5b3efc042fc6a7cadd13d8118b072afd04a9b9e91e06/debugpy-1.8.11-cp312-cp312-win_amd64.whl", hash = "sha256:44b1b8e6253bceada11f714acf4309ffb98bfa9ac55e4fce14f9e5d4484287a1", size = 5264578, upload-time = "2024-12-13T17:21:44.242Z" }, - { url = "https://files.pythonhosted.org/packages/77/0a/d29a5aacf47b4383ed569b8478c02d59ee3a01ad91224d2cff8562410e43/debugpy-1.8.11-py2.py3-none-any.whl", hash = "sha256:0e22f846f4211383e6a416d04b4c13ed174d24cc5d43f5fd52e7821d0ebc8920", size = 5226874, upload-time = "2024-12-13T17:22:15.097Z" }, + { url = "https://files.pythonhosted.org/packages/2e/66/931dc2479aa8fbf362dc6dcee707d895a84b0b2d7b64020135f20b8db1ed/debugpy-1.8.11-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:8988f7163e4381b0da7696f37eec7aca19deb02e500245df68a7159739bbd0d3", size = 2483651 }, + { url = "https://files.pythonhosted.org/packages/10/07/6c171d0fe6b8d237e35598b742f20ba062511b3a4631938cc78eefbbf847/debugpy-1.8.11-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c1f6a173d1140e557347419767d2b14ac1c9cd847e0b4c5444c7f3144697e4e", size = 4213770 }, + { url = "https://files.pythonhosted.org/packages/89/f1/0711da6ac250d4fe3bf7b3e9b14b4a86e82a98b7825075c07e19bab8da3d/debugpy-1.8.11-cp313-cp313-win32.whl", hash = "sha256:bb3b15e25891f38da3ca0740271e63ab9db61f41d4d8541745cfc1824252cb28", size = 5223911 }, + { url = "https://files.pythonhosted.org/packages/56/98/5e27fa39050749ed460025bcd0034a0a5e78a580a14079b164cc3abdeb98/debugpy-1.8.11-cp313-cp313-win_amd64.whl", hash = "sha256:d8768edcbeb34da9e11bcb8b5c2e0958d25218df7a6e56adf415ef262cd7b6d1", size = 5264166 }, + { url = "https://files.pythonhosted.org/packages/77/0a/d29a5aacf47b4383ed569b8478c02d59ee3a01ad91224d2cff8562410e43/debugpy-1.8.11-py2.py3-none-any.whl", hash = "sha256:0e22f846f4211383e6a416d04b4c13ed174d24cc5d43f5fd52e7821d0ebc8920", size = 5226874 }, ] [[package]] name = "decorator" 
version = "5.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016, upload-time = "2022-01-07T08:20:05.666Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073, upload-time = "2022-01-07T08:20:03.734Z" }, + { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, ] [[package]] name = "dill" version = "0.3.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847, upload-time = "2024-01-27T23:42:16.145Z" } +sdist = { url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7", size = 116252, upload-time = "2024-01-27T23:42:14.239Z" }, -] - -[[package]] -name = "dynaword" -version = "1.2.12" -source = { editable = "." 
} -dependencies = [ - { name = "datasets" }, - { name = "kaleido" }, - { name = "nbformat" }, - { name = "plotly" }, - { name = "plotnine" }, - { name = "pydantic" }, - { name = "tabulate" }, - { name = "tomlkit" }, - { name = "transformers" }, -] - -[package.dev-dependencies] -dev = [ - { name = "ipykernel" }, - { name = "pip" }, - { name = "pytest" }, - { name = "ruff" }, -] - -[package.metadata] -requires-dist = [ - { name = "datasets", specifier = ">=3.0.0" }, - { name = "kaleido", specifier = "==0.2.1" }, - { name = "nbformat", specifier = ">=4.2.0" }, - { name = "plotly", specifier = ">=6.0.1" }, - { name = "plotnine", specifier = ">=0.14.5" }, - { name = "pydantic", specifier = ">=2.10.4" }, - { name = "tabulate", specifier = ">=0.9.0" }, - { name = "tomlkit", specifier = ">=0.13.2" }, - { name = "transformers", specifier = ">=4.47.1" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "ipykernel", specifier = ">=6.29.5" }, - { name = "pip", specifier = ">=25.0.1" }, - { name = "pytest", specifier = ">=8.3.4" }, - { name = "ruff", specifier = ">=0.8.3" }, + { url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7", size = 116252 }, ] [[package]] name = "executing" version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485, upload-time = "2024-09-01T12:37:35.708Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805, upload-time = "2024-09-01T12:37:33.007Z" }, -] - -[[package]] -name = "fastjsonschema" -version = "2.21.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/50/4b769ce1ac4071a1ef6d86b1a3fb56cdc3a37615e8c5519e1af96cdac366/fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4", size = 373939, upload-time = "2024-12-02T10:55:15.133Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485 } wheels = [ - { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924, upload-time = "2024-12-02T10:55:07.599Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805 }, ] [[package]] name = "filelock" version = "3.16.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = 
"sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037, upload-time = "2024-09-17T19:02:01.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163, upload-time = "2024-09-17T19:02:00.268Z" }, + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, ] [[package]] name = "fonttools" version = "4.55.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/61/a300d1574dc381393424047c0396a0e213db212e28361123af9830d71a8d/fonttools-4.55.3.tar.gz", hash = "sha256:3983313c2a04d6cc1fe9251f8fc647754cf49a61dac6cb1e7249ae67afaafc45", size = 3498155, upload-time = "2024-12-10T21:39:26.588Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/61/a300d1574dc381393424047c0396a0e213db212e28361123af9830d71a8d/fonttools-4.55.3.tar.gz", hash = "sha256:3983313c2a04d6cc1fe9251f8fc647754cf49a61dac6cb1e7249ae67afaafc45", size = 3498155 } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/58/fbcf5dff7e3ea844bb00c4d806ca1e339e1f2dce5529633bf4842c0c9a1f/fonttools-4.55.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f9e736f60f4911061235603a6119e72053073a12c6d7904011df2d8fad2c0e35", size = 2765380, upload-time = "2024-12-10T21:37:33.818Z" }, - { url = "https://files.pythonhosted.org/packages/81/dd/da6e329e51919b4f421c8738f3497e2ab08c168e76aaef7b6d5351862bdf/fonttools-4.55.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a8aa2c5e5b8b3bcb2e4538d929f6589a5c6bdb84fd16e2ed92649fb5454f11c", size = 2297940, upload-time = "2024-12-10T21:37:36.876Z" }, - { url = "https://files.pythonhosted.org/packages/00/44/f5ee560858425c99ef07e04919e736db09d6416408e5a8d3bbfb4a6623fd/fonttools-4.55.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07f8288aacf0a38d174445fc78377a97fb0b83cfe352a90c9d9c1400571963c7", size = 4793327, upload-time = "2024-12-10T21:37:39.696Z" }, - { url = "https://files.pythonhosted.org/packages/24/da/0a001926d791c55e29ac3c52964957a20dbc1963615446b568b7432891c3/fonttools-4.55.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8d5e8916c0970fbc0f6f1bece0063363bb5857a7f170121a4493e31c3db3314", size = 4865624, upload-time = "2024-12-10T21:37:42.531Z" }, - { url = "https://files.pythonhosted.org/packages/3d/d8/1edd8b13a427a9fb6418373437caa586c0caa57f260af8e0548f4d11e340/fonttools-4.55.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ae3b6600565b2d80b7c05acb8e24d2b26ac407b27a3f2e078229721ba5698427", size = 4774166, upload-time = "2024-12-10T21:37:45.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ec/ade054097976c3d6debc9032e09a351505a0196aa5493edf021be376f75e/fonttools-4.55.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:54153c49913f45065c8d9e6d0c101396725c5621c8aee744719300f79771d75a", size = 5001832, upload-time = 
"2024-12-10T21:37:49.699Z" }, - { url = "https://files.pythonhosted.org/packages/e2/cd/233f0e31ad799bb91fc78099c8b4e5ec43b85a131688519640d6bae46f6a/fonttools-4.55.3-cp312-cp312-win32.whl", hash = "sha256:827e95fdbbd3e51f8b459af5ea10ecb4e30af50221ca103bea68218e9615de07", size = 2162228, upload-time = "2024-12-10T21:37:53.524Z" }, - { url = "https://files.pythonhosted.org/packages/46/45/a498b5291f6c0d91b2394b1ed7447442a57d1c9b9cf8f439aee3c316a56e/fonttools-4.55.3-cp312-cp312-win_amd64.whl", hash = "sha256:e6e8766eeeb2de759e862004aa11a9ea3d6f6d5ec710551a88b476192b64fd54", size = 2209118, upload-time = "2024-12-10T21:37:56.951Z" }, - { url = "https://files.pythonhosted.org/packages/99/3b/406d17b1f63e04a82aa621936e6e1c53a8c05458abd66300ac85ea7f9ae9/fonttools-4.55.3-py3-none-any.whl", hash = "sha256:f412604ccbeee81b091b420272841e5ec5ef68967a9790e80bffd0e30b8e2977", size = 1111638, upload-time = "2024-12-10T21:39:22.986Z" }, + { url = "https://files.pythonhosted.org/packages/9c/9f/00142a19bad96eeeb1aed93f567adc19b7f2c1af6f5bc0a1c3de90b4b1ac/fonttools-4.55.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a430178ad3e650e695167cb53242dae3477b35c95bef6525b074d87493c4bf29", size = 2752812 }, + { url = "https://files.pythonhosted.org/packages/b0/20/14b8250d63ba65e162091fb0dda07730f90c303bbf5257e9ddacec7230d9/fonttools-4.55.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:529cef2ce91dc44f8e407cc567fae6e49a1786f2fefefa73a294704c415322a4", size = 2291521 }, + { url = "https://files.pythonhosted.org/packages/34/47/a681cfd10245eb74f65e491a934053ec75c4af639655446558f29818e45e/fonttools-4.55.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e75f12c82127486fac2d8bfbf5bf058202f54bf4f158d367e41647b972342ca", size = 4770980 }, + { url = "https://files.pythonhosted.org/packages/d2/6c/a7066afc19db0705a12efd812e19c32cde2b9514eb714659522f2ebd60b6/fonttools-4.55.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:859c358ebf41db18fb72342d3080bce67c02b39e86b9fbcf1610cca14984841b", size = 4845534 }, + { url = "https://files.pythonhosted.org/packages/0c/a2/3c204fbabbfd845d9bdcab9ae35279d41e9a4bf5c80a0a2708f9c5a195d6/fonttools-4.55.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:546565028e244a701f73df6d8dd6be489d01617863ec0c6a42fa25bf45d43048", size = 4753910 }, + { url = "https://files.pythonhosted.org/packages/6e/8c/b4cb3592880340b89e4ef6601b531780bba73862332a6451d78fe135d6cb/fonttools-4.55.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:aca318b77f23523309eec4475d1fbbb00a6b133eb766a8bdc401faba91261abe", size = 4976411 }, + { url = "https://files.pythonhosted.org/packages/fc/a8/4bf98840ff89fcc188470b59daec57322178bf36d2f4f756cd19a42a826b/fonttools-4.55.3-cp313-cp313-win32.whl", hash = "sha256:8c5ec45428edaa7022f1c949a632a6f298edc7b481312fc7dc258921e9399628", size = 2160178 }, + { url = "https://files.pythonhosted.org/packages/e6/57/4cc35004605416df3225ff362f3455cf09765db00df578ae9e46d0fefd23/fonttools-4.55.3-cp313-cp313-win_amd64.whl", hash = "sha256:11e5de1ee0d95af4ae23c1a138b184b7f06e0b6abacabf1d0db41c90b03d834b", size = 2206102 }, + { url = "https://files.pythonhosted.org/packages/99/3b/406d17b1f63e04a82aa621936e6e1c53a8c05458abd66300ac85ea7f9ae9/fonttools-4.55.3-py3-none-any.whl", hash = "sha256:f412604ccbeee81b091b420272841e5ec5ef68967a9790e80bffd0e30b8e2977", size = 1111638 }, ] [[package]] name = "frozenlist" version = "1.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { 
url = "https://files.pythonhosted.org/packages/8f/ed/0f4cec13a93c02c47ec32d81d11c0c1efbadf4a471e3f3ce7cad366cbbd3/frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817", size = 39930, upload-time = "2024-10-23T09:48:29.903Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/ed/0f4cec13a93c02c47ec32d81d11c0c1efbadf4a471e3f3ce7cad366cbbd3/frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817", size = 39930 } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/73/fa6d1a96ab7fd6e6d1c3500700963eab46813847f01ef0ccbaa726181dd5/frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21", size = 94026, upload-time = "2024-10-23T09:46:58.601Z" }, - { url = "https://files.pythonhosted.org/packages/ab/04/ea8bf62c8868b8eada363f20ff1b647cf2e93377a7b284d36062d21d81d1/frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d", size = 54150, upload-time = "2024-10-23T09:46:59.608Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9a/8e479b482a6f2070b26bda572c5e6889bb3ba48977e81beea35b5ae13ece/frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e", size = 51927, upload-time = "2024-10-23T09:47:00.625Z" }, - { url = "https://files.pythonhosted.org/packages/e3/12/2aad87deb08a4e7ccfb33600871bbe8f0e08cb6d8224371387f3303654d7/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a", size = 282647, upload-time = "2024-10-23T09:47:01.992Z" }, - { url = "https://files.pythonhosted.org/packages/77/f2/07f06b05d8a427ea0060a9cef6e63405ea9e0d761846b95ef3fb3be57111/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a", size = 289052, upload-time = "2024-10-23T09:47:04.039Z" }, - { url = "https://files.pythonhosted.org/packages/bd/9f/8bf45a2f1cd4aa401acd271b077989c9267ae8463e7c8b1eb0d3f561b65e/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee", size = 291719, upload-time = "2024-10-23T09:47:05.58Z" }, - { url = "https://files.pythonhosted.org/packages/41/d1/1f20fd05a6c42d3868709b7604c9f15538a29e4f734c694c6bcfc3d3b935/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6", size = 267433, upload-time = "2024-10-23T09:47:07.807Z" }, - { url = "https://files.pythonhosted.org/packages/af/f2/64b73a9bb86f5a89fb55450e97cd5c1f84a862d4ff90d9fd1a73ab0f64a5/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e", size = 283591, upload-time = "2024-10-23T09:47:09.645Z" }, - { url = "https://files.pythonhosted.org/packages/29/e2/ffbb1fae55a791fd6c2938dd9ea779509c977435ba3940b9f2e8dc9d5316/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9", size = 273249, upload-time = "2024-10-23T09:47:10.808Z" }, - { 
url = "https://files.pythonhosted.org/packages/2e/6e/008136a30798bb63618a114b9321b5971172a5abddff44a100c7edc5ad4f/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039", size = 271075, upload-time = "2024-10-23T09:47:11.938Z" }, - { url = "https://files.pythonhosted.org/packages/ae/f0/4e71e54a026b06724cec9b6c54f0b13a4e9e298cc8db0f82ec70e151f5ce/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784", size = 285398, upload-time = "2024-10-23T09:47:14.071Z" }, - { url = "https://files.pythonhosted.org/packages/4d/36/70ec246851478b1c0b59f11ef8ade9c482ff447c1363c2bd5fad45098b12/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631", size = 294445, upload-time = "2024-10-23T09:47:15.318Z" }, - { url = "https://files.pythonhosted.org/packages/37/e0/47f87544055b3349b633a03c4d94b405956cf2437f4ab46d0928b74b7526/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f", size = 280569, upload-time = "2024-10-23T09:47:17.149Z" }, - { url = "https://files.pythonhosted.org/packages/f9/7c/490133c160fb6b84ed374c266f42800e33b50c3bbab1652764e6e1fc498a/frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8", size = 44721, upload-time = "2024-10-23T09:47:19.012Z" }, - { url = "https://files.pythonhosted.org/packages/b1/56/4e45136ffc6bdbfa68c29ca56ef53783ef4c2fd395f7cbf99a2624aa9aaa/frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f", size = 51329, upload-time = "2024-10-23T09:47:20.177Z" }, - { url = "https://files.pythonhosted.org/packages/c6/c8/a5be5b7550c10858fcf9b0ea054baccab474da77d37f1e828ce043a3a5d4/frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3", size = 11901, upload-time = "2024-10-23T09:48:28.851Z" }, + { url = "https://files.pythonhosted.org/packages/da/3b/915f0bca8a7ea04483622e84a9bd90033bab54bdf485479556c74fd5eaf5/frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953", size = 91538 }, + { url = "https://files.pythonhosted.org/packages/c7/d1/a7c98aad7e44afe5306a2b068434a5830f1470675f0e715abb86eb15f15b/frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0", size = 52849 }, + { url = "https://files.pythonhosted.org/packages/3a/c8/76f23bf9ab15d5f760eb48701909645f686f9c64fbb8982674c241fbef14/frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2", size = 50583 }, + { url = "https://files.pythonhosted.org/packages/1f/22/462a3dd093d11df623179d7754a3b3269de3b42de2808cddef50ee0f4f48/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f", size = 265636 }, + { url = "https://files.pythonhosted.org/packages/80/cf/e075e407fc2ae7328155a1cd7e22f932773c8073c1fc78016607d19cc3e5/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608", size = 
270214 }, + { url = "https://files.pythonhosted.org/packages/a1/58/0642d061d5de779f39c50cbb00df49682832923f3d2ebfb0fedf02d05f7f/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b", size = 273905 }, + { url = "https://files.pythonhosted.org/packages/ab/66/3fe0f5f8f2add5b4ab7aa4e199f767fd3b55da26e3ca4ce2cc36698e50c4/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840", size = 250542 }, + { url = "https://files.pythonhosted.org/packages/f6/b8/260791bde9198c87a465224e0e2bb62c4e716f5d198fc3a1dacc4895dbd1/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439", size = 267026 }, + { url = "https://files.pythonhosted.org/packages/2e/a4/3d24f88c527f08f8d44ade24eaee83b2627793fa62fa07cbb7ff7a2f7d42/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de", size = 257690 }, + { url = "https://files.pythonhosted.org/packages/de/9a/d311d660420b2beeff3459b6626f2ab4fb236d07afbdac034a4371fe696e/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641", size = 253893 }, + { url = "https://files.pythonhosted.org/packages/c6/23/e491aadc25b56eabd0f18c53bb19f3cdc6de30b2129ee0bc39cd387cd560/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e", size = 267006 }, + { url = "https://files.pythonhosted.org/packages/08/c4/ab918ce636a35fb974d13d666dcbe03969592aeca6c3ab3835acff01f79c/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9", size = 276157 }, + { url = "https://files.pythonhosted.org/packages/c0/29/3b7a0bbbbe5a34833ba26f686aabfe982924adbdcafdc294a7a129c31688/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03", size = 264642 }, + { url = "https://files.pythonhosted.org/packages/ab/42/0595b3dbffc2e82d7fe658c12d5a5bafcd7516c6bf2d1d1feb5387caa9c1/frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c", size = 44914 }, + { url = "https://files.pythonhosted.org/packages/17/c4/b7db1206a3fea44bf3b838ca61deb6f74424a8a5db1dd53ecb21da669be6/frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28", size = 51167 }, + { url = "https://files.pythonhosted.org/packages/c6/c8/a5be5b7550c10858fcf9b0ea054baccab474da77d37f1e828ce043a3a5d4/frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3", size = 11901 }, ] [[package]] name = "fsspec" version = "2024.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/62/7c/12b0943011daaaa9c35c2a2e22e5eb929ac90002f08f1259d69aedad84de/fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8", size = 286206, upload-time = "2024-09-04T15:06:57.91Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/62/7c/12b0943011daaaa9c35c2a2e22e5eb929ac90002f08f1259d69aedad84de/fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8", size = 286206 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b", size = 179253, upload-time = "2024-09-04T15:06:55.908Z" }, + { url = "https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b", size = 179253 }, ] [package.optional-dependencies] @@ -385,7 +355,7 @@ http = [ [[package]] name = "huggingface-hub" -version = "0.27.0" +version = "0.26.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -396,27 +366,27 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/c6/e3709b61de8e7832dbe19f0d9637e81356cede733d99359fbce125423774/huggingface_hub-0.27.0.tar.gz", hash = "sha256:902cce1a1be5739f5589e560198a65a8edcfd3b830b1666f36e4b961f0454fac", size = 379286, upload-time = "2024-12-16T13:13:35.857Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/21/2be5c66f29e798650a3e66bb350dee63bd9ab02cfc3ed7197cf4a905203e/huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b", size = 375951 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/8c/fbdc0a88a622d9fa54e132d7bf3ee03ec602758658a2db5b339a65be2cfe/huggingface_hub-0.27.0-py3-none-any.whl", hash = "sha256:8f2e834517f1f1ddf1ecc716f91b120d7333011b7485f665a9a412eacb1a2a81", size = 450537, upload-time = "2024-12-16T13:13:32.181Z" }, + { url = "https://files.pythonhosted.org/packages/44/5a/dc6af87c61f89b23439eb95521e4e99862636cfd538ae12fd36be5483e5f/huggingface_hub-0.26.5-py3-none-any.whl", hash = "sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924", size = 447766 }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] [[package]] name = "iniconfig" version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, ] [[package]] @@ -424,7 +394,7 @@ name = "ipykernel" version = "6.29.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "appnope", marker = "sys_platform == 'darwin'" }, + { name = "appnope", marker = "platform_system == 'Darwin'" }, { name = "comm" }, { name = "debugpy" }, { name = "ipython" }, @@ -438,14 +408,14 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367, upload-time = "2024-07-01T14:07:22.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367 } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173, upload-time = "2024-07-01T14:07:19.603Z" }, + { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173 }, ] [[package]] name = "ipython" -version = "8.31.0" +version = "8.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -458,9 +428,9 @@ dependencies = [ { name = "stack-data" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/35/6f90fdddff7a08b7b715fccbd2427b5212c9525cd043d26fdc45bee0708d/ipython-8.31.0.tar.gz", hash = "sha256:b6a2274606bec6166405ff05e54932ed6e5cfecaca1fc05f2cacde7bb074d70b", size = 5501011, upload-time = "2024-12-20T12:34:22.61Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/8b/710af065ab8ed05649afa5bd1e07401637c9ec9fb7cfda9eac7e91e9fbd4/ipython-8.30.0.tar.gz", hash = "sha256:cb0a405a306d2995a5cbb9901894d240784a9f341394c6ba3f4fe8c6eb89ff6e", size = 5592205 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/04/60/d0feb6b6d9fe4ab89fe8fe5b47cbf6cd936bfd9f1e7ffa9d0015425aeed6/ipython-8.31.0-py3-none-any.whl", hash = "sha256:46ec58f8d3d076a61d128fe517a51eb730e3aaf0c184ea8c17d16e366660c6a6", size = 821583, upload-time = "2024-12-20T12:34:17.106Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f3/1332ba2f682b07b304ad34cad2f003adcfeb349486103f4b632335074a7c/ipython-8.30.0-py3-none-any.whl", hash = "sha256:85ec56a7e20f6c38fce7727dcca699ae4ffc85985aa7b23635a8008f918ae321", size = 820765 }, ] [[package]] @@ -470,36 +440,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "parso" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, -] - -[[package]] -name = "jsonschema" -version = "4.23.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "jsonschema-specifications" }, - { name = "referencing" }, - { name = "rpds-py" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778, upload-time = "2024-07-08T18:40:05.546Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287 } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462, upload-time = "2024-07-08T18:40:00.165Z" }, -] - -[[package]] -name = "jsonschema-specifications" -version = "2025.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "referencing" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, + { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278 }, ] [[package]] @@ -513,9 +456,9 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019 } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" }, + { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105 }, ] [[package]] @@ -527,45 +470,33 @@ dependencies = [ { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/11/b56381fa6c3f4cc5d2cf54a7dbf98ad9aa0b339ef7a601d6053538b079a7/jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9", size = 87629, upload-time = "2024-03-12T12:37:35.652Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/11/b56381fa6c3f4cc5d2cf54a7dbf98ad9aa0b339ef7a601d6053538b079a7/jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9", size = 87629 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/fb/108ecd1fe961941959ad0ee4e12ee7b8b1477247f30b1fdfd83ceaf017f0/jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", size = 28965, upload-time = "2024-03-12T12:37:32.36Z" }, -] - -[[package]] -name = "kaleido" -version = "0.2.1" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f7/0ccaa596ec341963adbb4f839774c36d5659e75a0812d946732b927d480e/kaleido-0.2.1-py2.py3-none-macosx_10_11_x86_64.whl", hash = "sha256:ca6f73e7ff00aaebf2843f73f1d3bacde1930ef5041093fe76b83a15785049a7", size = 85153681, upload-time = "2021-03-08T10:27:34.202Z" }, - { url = "https://files.pythonhosted.org/packages/45/8e/4297556be5a07b713bb42dde0f748354de9a6918dee251c0e6bdcda341e7/kaleido-0.2.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:bb9a5d1f710357d5d432ee240ef6658a6d124c3e610935817b4b42da9c787c05", size = 85808197, upload-time = "2021-03-08T10:27:46.561Z" }, - { url = "https://files.pythonhosted.org/packages/ae/b3/a0f0f4faac229b0011d8c4a7ee6da7c2dca0b6fd08039c95920846f23ca4/kaleido-0.2.1-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:aa21cf1bf1c78f8fa50a9f7d45e1003c387bd3d6fe0a767cfbbf344b95bdc3a8", size = 79902476, upload-time = "2021-03-08T10:27:57.364Z" }, - { url = "https://files.pythonhosted.org/packages/a1/2b/680662678a57afab1685f0c431c2aba7783ce4344f06ec162074d485d469/kaleido-0.2.1-py2.py3-none-manylinux2014_aarch64.whl", hash = "sha256:845819844c8082c9469d9c17e42621fbf85c2b237ef8a86ec8a8527f98b6512a", size = 83711746, upload-time = "2021-03-08T10:28:08.847Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/89/4b6f8bb3f9ab036fd4ad1cb2d628ab5c81db32ac9aa0641d7b180073ba43/kaleido-0.2.1-py2.py3-none-win32.whl", hash = "sha256:ecc72635860be616c6b7161807a65c0dbd9b90c6437ac96965831e2e24066552", size = 62312480, upload-time = "2021-03-08T10:28:18.204Z" }, - { url = "https://files.pythonhosted.org/packages/f7/9a/0408b02a4bcb3cf8b338a2b074ac7d1b2099e2b092b42473def22f7b625f/kaleido-0.2.1-py2.py3-none-win_amd64.whl", hash = "sha256:4670985f28913c2d063c5734d125ecc28e40810141bdb0a46f15b76c1d45f23c", size = 65945521, upload-time = "2021-03-08T10:28:26.823Z" }, + { url = "https://files.pythonhosted.org/packages/c9/fb/108ecd1fe961941959ad0ee4e12ee7b8b1477247f30b1fdfd83ceaf017f0/jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", size = 28965 }, ] [[package]] name = "kiwisolver" -version = "1.4.8" +version = "1.4.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/82/59/7c91426a8ac292e1cdd53a63b6d9439abd573c875c3f92c146767dd33faf/kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e", size = 97538, upload-time = "2024-12-24T18:30:51.519Z" } +sdist = { url = "https://files.pythonhosted.org/packages/85/4d/2255e1c76304cbd60b48cee302b66d1dde4468dc5b1160e4b7cb43778f2a/kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60", size = 97286 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/aa/cea685c4ab647f349c3bc92d2daf7ae34c8e8cf405a6dcd3a497f58a2ac3/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502", size = 124152, upload-time = "2024-12-24T18:29:16.85Z" }, - { url = "https://files.pythonhosted.org/packages/c5/0b/8db6d2e2452d60d5ebc4ce4b204feeb16176a851fd42462f66ade6808084/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31", size = 66555, upload-time = "2024-12-24T18:29:19.146Z" }, - { url = "https://files.pythonhosted.org/packages/60/26/d6a0db6785dd35d3ba5bf2b2df0aedc5af089962c6eb2cbf67a15b81369e/kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb", size = 65067, upload-time = "2024-12-24T18:29:20.096Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ed/1d97f7e3561e09757a196231edccc1bcf59d55ddccefa2afc9c615abd8e0/kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f", size = 1378443, upload-time = "2024-12-24T18:29:22.843Z" }, - { url = "https://files.pythonhosted.org/packages/29/61/39d30b99954e6b46f760e6289c12fede2ab96a254c443639052d1b573fbc/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc", size = 1472728, upload-time = "2024-12-24T18:29:24.463Z" }, - { url = "https://files.pythonhosted.org/packages/0c/3e/804163b932f7603ef256e4a715e5843a9600802bb23a68b4e08c8c0ff61d/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a", size = 1478388, upload-time = "2024-12-24T18:29:25.776Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/9e/60eaa75169a154700be74f875a4d9961b11ba048bef315fbe89cb6999056/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a", size = 1413849, upload-time = "2024-12-24T18:29:27.202Z" }, - { url = "https://files.pythonhosted.org/packages/bc/b3/9458adb9472e61a998c8c4d95cfdfec91c73c53a375b30b1428310f923e4/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a", size = 1475533, upload-time = "2024-12-24T18:29:28.638Z" }, - { url = "https://files.pythonhosted.org/packages/e4/7a/0a42d9571e35798de80aef4bb43a9b672aa7f8e58643d7bd1950398ffb0a/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3", size = 2268898, upload-time = "2024-12-24T18:29:30.368Z" }, - { url = "https://files.pythonhosted.org/packages/d9/07/1255dc8d80271400126ed8db35a1795b1a2c098ac3a72645075d06fe5c5d/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b", size = 2425605, upload-time = "2024-12-24T18:29:33.151Z" }, - { url = "https://files.pythonhosted.org/packages/84/df/5a3b4cf13780ef6f6942df67b138b03b7e79e9f1f08f57c49957d5867f6e/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4", size = 2375801, upload-time = "2024-12-24T18:29:34.584Z" }, - { url = "https://files.pythonhosted.org/packages/8f/10/2348d068e8b0f635c8c86892788dac7a6b5c0cb12356620ab575775aad89/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d", size = 2520077, upload-time = "2024-12-24T18:29:36.138Z" }, - { url = "https://files.pythonhosted.org/packages/32/d8/014b89fee5d4dce157d814303b0fce4d31385a2af4c41fed194b173b81ac/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8", size = 2338410, upload-time = "2024-12-24T18:29:39.991Z" }, - { url = "https://files.pythonhosted.org/packages/bd/72/dfff0cc97f2a0776e1c9eb5bef1ddfd45f46246c6533b0191887a427bca5/kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50", size = 71853, upload-time = "2024-12-24T18:29:42.006Z" }, - { url = "https://files.pythonhosted.org/packages/dc/85/220d13d914485c0948a00f0b9eb419efaf6da81b7d72e88ce2391f7aed8d/kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476", size = 65424, upload-time = "2024-12-24T18:29:44.38Z" }, + { url = "https://files.pythonhosted.org/packages/c4/06/7da99b04259b0f18b557a4effd1b9c901a747f7fdd84cf834ccf520cb0b2/kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e", size = 121913 }, + { url = "https://files.pythonhosted.org/packages/97/f5/b8a370d1aa593c17882af0a6f6755aaecd643640c0ed72dcfd2eafc388b9/kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6", size = 65627 }, + { url = 
"https://files.pythonhosted.org/packages/2a/fc/6c0374f7503522539e2d4d1b497f5ebad3f8ed07ab51aed2af988dd0fb65/kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750", size = 63888 }, + { url = "https://files.pythonhosted.org/packages/bf/3e/0b7172793d0f41cae5c923492da89a2ffcd1adf764c16159ca047463ebd3/kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d", size = 1369145 }, + { url = "https://files.pythonhosted.org/packages/77/92/47d050d6f6aced2d634258123f2688fbfef8ded3c5baf2c79d94d91f1f58/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379", size = 1461448 }, + { url = "https://files.pythonhosted.org/packages/9c/1b/8f80b18e20b3b294546a1adb41701e79ae21915f4175f311a90d042301cf/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c", size = 1578750 }, + { url = "https://files.pythonhosted.org/packages/a4/fe/fe8e72f3be0a844f257cadd72689c0848c6d5c51bc1d60429e2d14ad776e/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34", size = 1507175 }, + { url = "https://files.pythonhosted.org/packages/39/fa/cdc0b6105d90eadc3bee525fecc9179e2b41e1ce0293caaf49cb631a6aaf/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1", size = 1463963 }, + { url = "https://files.pythonhosted.org/packages/6e/5c/0c03c4e542720c6177d4f408e56d1c8315899db72d46261a4e15b8b33a41/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f", size = 2248220 }, + { url = "https://files.pythonhosted.org/packages/3d/ee/55ef86d5a574f4e767df7da3a3a7ff4954c996e12d4fbe9c408170cd7dcc/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b", size = 2404463 }, + { url = "https://files.pythonhosted.org/packages/0f/6d/73ad36170b4bff4825dc588acf4f3e6319cb97cd1fb3eb04d9faa6b6f212/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27", size = 2352842 }, + { url = "https://files.pythonhosted.org/packages/0b/16/fa531ff9199d3b6473bb4d0f47416cdb08d556c03b8bc1cccf04e756b56d/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a", size = 2501635 }, + { url = "https://files.pythonhosted.org/packages/78/7e/aa9422e78419db0cbe75fb86d8e72b433818f2e62e2e394992d23d23a583/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee", size = 2314556 }, + { url = "https://files.pythonhosted.org/packages/a8/b2/15f7f556df0a6e5b3772a1e076a9d9f6c538ce5f05bd590eca8106508e06/kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07", size = 46364 }, + { url = 
"https://files.pythonhosted.org/packages/0b/db/32e897e43a330eee8e4770bfd2737a9584b23e33587a0812b8e20aac38f7/kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76", size = 55887 }, + { url = "https://files.pythonhosted.org/packages/c8/a4/df2bdca5270ca85fd25253049eb6708d4127be2ed0e5c2650217450b59e9/kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650", size = 48530 }, ] [[package]] @@ -583,14 +514,20 @@ dependencies = [ { name = "pyparsing" }, { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/68/dd/fa2e1a45fce2d09f4aea3cee169760e672c8262325aa5796c49d543dc7e6/matplotlib-3.10.0.tar.gz", hash = "sha256:b886d02a581b96704c9d1ffe55709e49b4d2d52709ccebc4be42db856e511278", size = 36686418, upload-time = "2024-12-14T06:32:51.547Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/dd/fa2e1a45fce2d09f4aea3cee169760e672c8262325aa5796c49d543dc7e6/matplotlib-3.10.0.tar.gz", hash = "sha256:b886d02a581b96704c9d1ffe55709e49b4d2d52709ccebc4be42db856e511278", size = 36686418 } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/c7/6b2d8cb7cc251d53c976799cacd3200add56351c175ba89ab9cbd7c1e68a/matplotlib-3.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4659665bc7c9b58f8c00317c3c2a299f7f258eeae5a5d56b4c64226fca2f7c59", size = 8172465, upload-time = "2024-12-14T06:31:24.727Z" }, - { url = "https://files.pythonhosted.org/packages/42/2a/6d66d0fba41e13e9ca6512a0a51170f43e7e7ed3a8dfa036324100775612/matplotlib-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d44cb942af1693cced2604c33a9abcef6205601c445f6d0dc531d813af8a2f5a", size = 8043300, upload-time = "2024-12-14T06:31:28.55Z" }, - { url = "https://files.pythonhosted.org/packages/90/60/2a60342b27b90a16bada939a85e29589902b41073f59668b904b15ea666c/matplotlib-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a994f29e968ca002b50982b27168addfd65f0105610b6be7fa515ca4b5307c95", size = 8448936, upload-time = "2024-12-14T06:31:32.223Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b2/d872fc3d753516870d520595ddd8ce4dd44fa797a240999f125f58521ad7/matplotlib-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b0558bae37f154fffda54d779a592bc97ca8b4701f1c710055b609a3bac44c8", size = 8594151, upload-time = "2024-12-14T06:31:34.894Z" }, - { url = "https://files.pythonhosted.org/packages/f4/bd/b2f60cf7f57d014ab33e4f74602a2b5bdc657976db8196bbc022185f6f9c/matplotlib-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:503feb23bd8c8acc75541548a1d709c059b7184cde26314896e10a9f14df5f12", size = 9400347, upload-time = "2024-12-14T06:31:39.552Z" }, - { url = "https://files.pythonhosted.org/packages/9f/6e/264673e64001b99d747aff5a288eca82826c024437a3694e19aed1decf46/matplotlib-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c40ba2eb08b3f5de88152c2333c58cee7edcead0a2a0d60fcafa116b17117adc", size = 8039144, upload-time = "2024-12-14T06:31:44.128Z" }, + { url = "https://files.pythonhosted.org/packages/72/11/1b2a094d95dcb6e6edd4a0b238177c439006c6b7a9fe8d31801237bf512f/matplotlib-3.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96f2886f5c1e466f21cc41b70c5a0cd47bfa0015eb2d5793c88ebce658600e25", size = 8173073 }, + { url = 
"https://files.pythonhosted.org/packages/0d/c4/87b6ad2723070511a411ea719f9c70fde64605423b184face4e94986de9d/matplotlib-3.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:12eaf48463b472c3c0f8dbacdbf906e573013df81a0ab82f0616ea4b11281908", size = 8043892 }, + { url = "https://files.pythonhosted.org/packages/57/69/cb0812a136550b21361335e9ffb7d459bf6d13e03cb7b015555d5143d2d6/matplotlib-3.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fbbabc82fde51391c4da5006f965e36d86d95f6ee83fb594b279564a4c5d0d2", size = 8450532 }, + { url = "https://files.pythonhosted.org/packages/ea/3a/bab9deb4fb199c05e9100f94d7f1c702f78d3241e6a71b784d2b88d7bebd/matplotlib-3.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad2e15300530c1a94c63cfa546e3b7864bd18ea2901317bae8bbf06a5ade6dcf", size = 8593905 }, + { url = "https://files.pythonhosted.org/packages/8b/66/742fd242f989adc1847ddf5f445815f73ad7c46aa3440690cc889cfa423c/matplotlib-3.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3547d153d70233a8496859097ef0312212e2689cdf8d7ed764441c77604095ae", size = 9399609 }, + { url = "https://files.pythonhosted.org/packages/fa/d6/54cee7142cef7d910a324a7aedf335c0c147b03658b54d49ec48166f10a6/matplotlib-3.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:c55b20591ced744aa04e8c3e4b7543ea4d650b6c3c4b208c08a05b4010e8b442", size = 8039076 }, + { url = "https://files.pythonhosted.org/packages/43/14/815d072dc36e88753433bfd0385113405efb947e6895ff7b4d2e8614a33b/matplotlib-3.10.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ade1003376731a971e398cc4ef38bb83ee8caf0aee46ac6daa4b0506db1fd06", size = 8211000 }, + { url = "https://files.pythonhosted.org/packages/9a/76/34e75f364194ec352678adcb540964be6f35ec7d3d8c75ebcb17e6839359/matplotlib-3.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95b710fea129c76d30be72c3b38f330269363fbc6e570a5dd43580487380b5ff", size = 8087707 }, + { url = "https://files.pythonhosted.org/packages/c3/2b/b6bc0dff6a72d333bc7df94a66e6ce662d224e43daa8ad8ae4eaa9a77f55/matplotlib-3.10.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdbaf909887373c3e094b0318d7ff230b2ad9dcb64da7ade654182872ab2593", size = 8477384 }, + { url = "https://files.pythonhosted.org/packages/c2/2d/b5949fb2b76e9b47ab05e25a5f5f887c70de20d8b0cbc704a4e2ee71c786/matplotlib-3.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d907fddb39f923d011875452ff1eca29a9e7f21722b873e90db32e5d8ddff12e", size = 8610334 }, + { url = "https://files.pythonhosted.org/packages/d6/9a/6e3c799d5134d9af44b01c787e1360bee38cf51850506ea2e743a787700b/matplotlib-3.10.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3b427392354d10975c1d0f4ee18aa5844640b512d5311ef32efd4dd7db106ede", size = 9406777 }, + { url = "https://files.pythonhosted.org/packages/0e/dd/e6ae97151e5ed648ab2ea48885bc33d39202b640eec7a2910e2c843f7ac0/matplotlib-3.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5fd41b0ec7ee45cd960a8e71aea7c946a28a0b8a4dcee47d2856b2af051f334c", size = 8109742 }, ] [[package]] @@ -600,9 +537,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159, upload-time = "2024-04-15T13:44:44.803Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" }, + { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 }, ] [[package]] @@ -613,35 +550,35 @@ dependencies = [ { name = "numpy" }, { name = "pandas" }, { name = "scipy" }, - { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, + { name = "tzdata", marker = "platform_system == 'Emscripten' or platform_system == 'Windows'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/c3/9f83c374314b2b42e7aec65f3bf87046415ab265f209fa8a04eb6da822ee/mizani-0.13.1.tar.gz", hash = "sha256:e3247ea12c746c8104767d7e42a2d16473173c7bc314f298d8294a58f4653353", size = 765181, upload-time = "2024-12-10T16:33:37.468Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/c3/9f83c374314b2b42e7aec65f3bf87046415ab265f209fa8a04eb6da822ee/mizani-0.13.1.tar.gz", hash = "sha256:e3247ea12c746c8104767d7e42a2d16473173c7bc314f298d8294a58f4653353", size = 765181 } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/85/16e17e75831ec01808c5f07e578f1552df87a4f5c827caa8be28f97b4c19/mizani-0.13.1-py3-none-any.whl", hash = "sha256:7da0dcacd43fbcc01c279ea06a76f1f064ae90dbb387c4a985ba24a92d3c7d7a", size = 127896, upload-time = "2024-12-10T16:33:35.763Z" }, + { url = "https://files.pythonhosted.org/packages/29/85/16e17e75831ec01808c5f07e578f1552df87a4f5c827caa8be28f97b4c19/mizani-0.13.1-py3-none-any.whl", hash = "sha256:7da0dcacd43fbcc01c279ea06a76f1f064ae90dbb387c4a985ba24a92d3c7d7a", size = 127896 }, ] [[package]] name = "multidict" version = "6.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002, upload-time = "2024-09-09T23:49:38.163Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/16/92057c74ba3b96d5e211b553895cd6dc7cc4d1e43d9ab8fafc727681ef71/multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa", size = 48713, upload-time = "2024-09-09T23:48:01.893Z" }, - { url = "https://files.pythonhosted.org/packages/94/3d/37d1b8893ae79716179540b89fc6a0ee56b4a65fcc0d63535c6f5d96f217/multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436", size = 29516, upload-time = "2024-09-09T23:48:03.463Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/12/adb6b3200c363062f805275b4c1e656be2b3681aada66c80129932ff0bae/multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761", size = 29557, upload-time = "2024-09-09T23:48:04.905Z" }, - { url = "https://files.pythonhosted.org/packages/47/e9/604bb05e6e5bce1e6a5cf80a474e0f072e80d8ac105f1b994a53e0b28c42/multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e", size = 130170, upload-time = "2024-09-09T23:48:06.862Z" }, - { url = "https://files.pythonhosted.org/packages/7e/13/9efa50801785eccbf7086b3c83b71a4fb501a4d43549c2f2f80b8787d69f/multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef", size = 134836, upload-time = "2024-09-09T23:48:08.537Z" }, - { url = "https://files.pythonhosted.org/packages/bf/0f/93808b765192780d117814a6dfcc2e75de6dcc610009ad408b8814dca3ba/multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95", size = 133475, upload-time = "2024-09-09T23:48:09.865Z" }, - { url = "https://files.pythonhosted.org/packages/d3/c8/529101d7176fe7dfe1d99604e48d69c5dfdcadb4f06561f465c8ef12b4df/multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925", size = 131049, upload-time = "2024-09-09T23:48:11.115Z" }, - { url = "https://files.pythonhosted.org/packages/ca/0c/fc85b439014d5a58063e19c3a158a889deec399d47b5269a0f3b6a2e28bc/multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966", size = 120370, upload-time = "2024-09-09T23:48:12.78Z" }, - { url = "https://files.pythonhosted.org/packages/db/46/d4416eb20176492d2258fbd47b4abe729ff3b6e9c829ea4236f93c865089/multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305", size = 125178, upload-time = "2024-09-09T23:48:14.295Z" }, - { url = "https://files.pythonhosted.org/packages/5b/46/73697ad7ec521df7de5531a32780bbfd908ded0643cbe457f981a701457c/multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2", size = 119567, upload-time = "2024-09-09T23:48:16.284Z" }, - { url = "https://files.pythonhosted.org/packages/cd/ed/51f060e2cb0e7635329fa6ff930aa5cffa17f4c7f5c6c3ddc3500708e2f2/multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2", size = 129822, upload-time = "2024-09-09T23:48:17.835Z" }, - { url = "https://files.pythonhosted.org/packages/df/9e/ee7d1954b1331da3eddea0c4e08d9142da5f14b1321c7301f5014f49d492/multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6", size = 128656, upload-time = "2024-09-09T23:48:19.576Z" }, - { url = "https://files.pythonhosted.org/packages/77/00/8538f11e3356b5d95fa4b024aa566cde7a38aa7a5f08f4912b32a037c5dc/multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3", size = 125360, upload-time = 
"2024-09-09T23:48:20.957Z" }, - { url = "https://files.pythonhosted.org/packages/be/05/5d334c1f2462d43fec2363cd00b1c44c93a78c3925d952e9a71caf662e96/multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133", size = 26382, upload-time = "2024-09-09T23:48:22.351Z" }, - { url = "https://files.pythonhosted.org/packages/a3/bf/f332a13486b1ed0496d624bcc7e8357bb8053823e8cd4b9a18edc1d97e73/multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1", size = 28529, upload-time = "2024-09-09T23:48:23.478Z" }, - { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051, upload-time = "2024-09-09T23:49:36.506Z" }, + { url = "https://files.pythonhosted.org/packages/22/67/1c7c0f39fe069aa4e5d794f323be24bf4d33d62d2a348acdb7991f8f30db/multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008", size = 48771 }, + { url = "https://files.pythonhosted.org/packages/3c/25/c186ee7b212bdf0df2519eacfb1981a017bda34392c67542c274651daf23/multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f", size = 29533 }, + { url = "https://files.pythonhosted.org/packages/67/5e/04575fd837e0958e324ca035b339cea174554f6f641d3fb2b4f2e7ff44a2/multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28", size = 29595 }, + { url = "https://files.pythonhosted.org/packages/d3/b2/e56388f86663810c07cfe4a3c3d87227f3811eeb2d08450b9e5d19d78876/multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b", size = 130094 }, + { url = "https://files.pythonhosted.org/packages/6c/ee/30ae9b4186a644d284543d55d491fbd4239b015d36b23fea43b4c94f7052/multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c", size = 134876 }, + { url = "https://files.pythonhosted.org/packages/84/c7/70461c13ba8ce3c779503c70ec9d0345ae84de04521c1f45a04d5f48943d/multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3", size = 133500 }, + { url = "https://files.pythonhosted.org/packages/4a/9f/002af221253f10f99959561123fae676148dd730e2daa2cd053846a58507/multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44", size = 131099 }, + { url = "https://files.pythonhosted.org/packages/82/42/d1c7a7301d52af79d88548a97e297f9d99c961ad76bbe6f67442bb77f097/multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2", size = 120403 }, + { url = "https://files.pythonhosted.org/packages/68/f3/471985c2c7ac707547553e8f37cff5158030d36bdec4414cb825fbaa5327/multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3", size = 125348 }, + { url = 
"https://files.pythonhosted.org/packages/67/2c/e6df05c77e0e433c214ec1d21ddd203d9a4770a1f2866a8ca40a545869a0/multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa", size = 119673 }, + { url = "https://files.pythonhosted.org/packages/c5/cd/bc8608fff06239c9fb333f9db7743a1b2eafe98c2666c9a196e867a3a0a4/multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa", size = 129927 }, + { url = "https://files.pythonhosted.org/packages/44/8e/281b69b7bc84fc963a44dc6e0bbcc7150e517b91df368a27834299a526ac/multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4", size = 128711 }, + { url = "https://files.pythonhosted.org/packages/12/a4/63e7cd38ed29dd9f1881d5119f272c898ca92536cdb53ffe0843197f6c85/multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6", size = 125519 }, + { url = "https://files.pythonhosted.org/packages/38/e0/4f5855037a72cd8a7a2f60a3952d9aa45feedb37ae7831642102604e8a37/multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81", size = 26426 }, + { url = "https://files.pythonhosted.org/packages/7e/a5/17ee3a4db1e310b7405f5d25834460073a8ccd86198ce044dfaf69eac073/multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774", size = 28531 }, + { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051 }, ] [[package]] @@ -651,73 +588,59 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dill" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1", size = 1772603, upload-time = "2024-01-28T18:52:34.85Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02", size = 134824, upload-time = "2024-01-28T18:52:26.062Z" }, - { url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a", size = 143519, upload-time = "2024-01-28T18:52:28.115Z" }, - { url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e", size = 146741, upload-time = "2024-01-28T18:52:29.395Z" }, - { url = "https://files.pythonhosted.org/packages/ea/89/38df130f2c799090c978b366cfdf5b96d08de5b29a4a293df7f7429fa50b/multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435", size = 132628, upload-time = "2024-01-28T18:52:30.853Z" }, - { url = 
"https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351, upload-time = "2024-01-28T18:52:31.981Z" }, -] - -[[package]] -name = "narwhals" -version = "1.38.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dd/fa/f4c2b2524b6b1e7475af933849ecaad280822eab5631151ccb1993d600ce/narwhals-1.38.2.tar.gz", hash = "sha256:7c5fbc9f2b8e1d5d95f49dcef9c2d94bf17810de68c87ff195dc7d22f7b3eeb5", size = 277368, upload-time = "2025-05-08T17:02:28.125Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/56/45/59251121eb801f033ffdd56d8893a0f20ec661bdc362f8d52a4e4d547f91/narwhals-1.38.2-py3-none-any.whl", hash = "sha256:a33a182e32f18d794a04e7828a5c401fb26ce9083f609993e7e5064aace641c7", size = 338437, upload-time = "2025-05-08T17:02:25.88Z" }, -] - -[[package]] -name = "nbformat" -version = "5.10.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "fastjsonschema" }, - { name = "jsonschema" }, - { name = "jupyter-core" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1", size = 1772603 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02", size = 134824 }, + { url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a", size = 143519 }, + { url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e", size = 146741 }, + { url = "https://files.pythonhosted.org/packages/ea/89/38df130f2c799090c978b366cfdf5b96d08de5b29a4a293df7f7429fa50b/multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435", size = 132628 }, + { url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351 }, ] [[package]] name = "nest-asyncio" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 }, ] [[package]] name = "numpy" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/fdbf6a7871703df6160b5cf3dd774074b086d278172285c52c2758b76305/numpy-2.2.1.tar.gz", hash = "sha256:45681fd7128c8ad1c379f0ca0776a8b0c6583d2f69889ddac01559dfe4390918", size = 20227662, upload-time = "2024-12-21T22:49:36.523Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/12/b928871c570d4a87ab13d2cc19f8817f17e340d5481621930e76b80ffb7d/numpy-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:694f9e921a0c8f252980e85bce61ebbd07ed2b7d4fa72d0e4246f2f8aa6642ab", size = 20909861, upload-time = "2024-12-21T22:32:05.145Z" }, - { url = "https://files.pythonhosted.org/packages/3d/c3/59df91ae1d8ad7c5e03efd63fd785dec62d96b0fe56d1f9ab600b55009af/numpy-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3683a8d166f2692664262fd4900f207791d005fb088d7fdb973cc8d663626faa", size = 14095776, upload-time = "2024-12-21T22:32:37.312Z" }, - { url = "https://files.pythonhosted.org/packages/af/4e/8ed5868efc8e601fb69419644a280e9c482b75691466b73bfaab7d86922c/numpy-2.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:780077d95eafc2ccc3ced969db22377b3864e5b9a0ea5eb347cc93b3ea900315", size = 5126239, upload-time = "2024-12-21T22:32:59.288Z" }, - { url = "https://files.pythonhosted.org/packages/1a/74/dd0bbe650d7bc0014b051f092f2de65e34a8155aabb1287698919d124d7f/numpy-2.2.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:55ba24ebe208344aa7a00e4482f65742969a039c2acfcb910bc6fcd776eb4355", size = 6659296, upload-time = "2024-12-21T22:33:11.456Z" }, - { url = "https://files.pythonhosted.org/packages/7f/11/4ebd7a3f4a655764dc98481f97bd0a662fb340d1001be6050606be13e162/numpy-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1d07b53b78bf84a96898c1bc139ad7f10fda7423f5fd158fd0f47ec5e01ac7", size = 14047121, upload-time = "2024-12-21T22:33:47.216Z" }, - { url = "https://files.pythonhosted.org/packages/7f/a7/c1f1d978166eb6b98ad009503e4d93a8c1962d0eb14a885c352ee0276a54/numpy-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5062dc1a4e32a10dc2b8b13cedd58988261416e811c1dc4dbdea4f57eea61b0d", size = 16096599, upload-time = "2024-12-21T22:34:27.868Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/6d/0e22afd5fcbb4d8d0091f3f46bf4e8906399c458d4293da23292c0ba5022/numpy-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fce4f615f8ca31b2e61aa0eb5865a21e14f5629515c9151850aa936c02a1ee51", size = 15243932, upload-time = "2024-12-21T22:35:05.318Z" }, - { url = "https://files.pythonhosted.org/packages/03/39/e4e5832820131ba424092b9610d996b37e5557180f8e2d6aebb05c31ae54/numpy-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67d4cda6fa6ffa073b08c8372aa5fa767ceb10c9a0587c707505a6d426f4e046", size = 17861032, upload-time = "2024-12-21T22:35:37.77Z" }, - { url = "https://files.pythonhosted.org/packages/5f/8a/3794313acbf5e70df2d5c7d2aba8718676f8d054a05abe59e48417fb2981/numpy-2.2.1-cp312-cp312-win32.whl", hash = "sha256:32cb94448be47c500d2c7a95f93e2f21a01f1fd05dd2beea1ccd049bb6001cd2", size = 6274018, upload-time = "2024-12-21T22:35:51.117Z" }, - { url = "https://files.pythonhosted.org/packages/17/c1/c31d3637f2641e25c7a19adf2ae822fdaf4ddd198b05d79a92a9ce7cb63e/numpy-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:ba5511d8f31c033a5fcbda22dd5c813630af98c70b2661f2d2c654ae3cdfcfc8", size = 12613843, upload-time = "2024-12-21T22:36:22.816Z" }, +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1d565e0f6e156e1522ab564176b8b29d71e13d8caf003a08768df3d5cec5/numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0", size = 20225497 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/4c/0d1eef206545c994289e7a9de21b642880a11e0ed47a2b0c407c688c4f69/numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367", size = 20895707 }, + { url = "https://files.pythonhosted.org/packages/16/cb/88f6c1e6df83002c421d5f854ccf134aa088aa997af786a5dac3f32ec99b/numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae", size = 14110592 }, + { url = "https://files.pythonhosted.org/packages/b4/54/817e6894168a43f33dca74199ba0dd0f1acd99aa6323ed6d323d63d640a2/numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69", size = 5110858 }, + { url = "https://files.pythonhosted.org/packages/c7/99/00d8a1a8eb70425bba7880257ed73fed08d3e8d05da4202fb6b9a81d5ee4/numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13", size = 6645143 }, + { url = "https://files.pythonhosted.org/packages/34/86/5b9c2b7c56e7a9d9297a0a4be0b8433f498eba52a8f5892d9132b0f64627/numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671", size = 14042812 }, + { url = "https://files.pythonhosted.org/packages/df/54/13535f74391dbe5f479ceed96f1403267be302c840040700d4fd66688089/numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571", size = 16093419 }, + { url = "https://files.pythonhosted.org/packages/dd/37/dfb2056842ac61315f225aa56f455da369f5223e4c5a38b91d20da1b628b/numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d", size = 15238969 }, + { url = 
"https://files.pythonhosted.org/packages/5a/3d/d20d24ee313992f0b7e7b9d9eef642d9b545d39d5b91c4a2cc8c98776328/numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742", size = 17855705 }, + { url = "https://files.pythonhosted.org/packages/5b/40/944c9ee264f875a2db6f79380944fd2b5bb9d712bb4a134d11f45ad5b693/numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e", size = 6270078 }, + { url = "https://files.pythonhosted.org/packages/30/04/e1ee6f8b22034302d4c5c24e15782bdedf76d90b90f3874ed0b48525def0/numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2", size = 12605791 }, + { url = "https://files.pythonhosted.org/packages/ef/fb/51d458625cd6134d60ac15180ae50995d7d21b0f2f92a6286ae7b0792d19/numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95", size = 20920160 }, + { url = "https://files.pythonhosted.org/packages/b4/34/162ae0c5d2536ea4be98c813b5161c980f0443cd5765fde16ddfe3450140/numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c", size = 14119064 }, + { url = "https://files.pythonhosted.org/packages/17/6c/4195dd0e1c41c55f466d516e17e9e28510f32af76d23061ea3da67438e3c/numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca", size = 5152778 }, + { url = "https://files.pythonhosted.org/packages/2f/47/ea804ae525832c8d05ed85b560dfd242d34e4bb0962bc269ccaa720fb934/numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d", size = 6667605 }, + { url = "https://files.pythonhosted.org/packages/76/99/34d20e50b3d894bb16b5374bfbee399ab8ff3a33bf1e1f0b8acfe7bbd70d/numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529", size = 14013275 }, + { url = "https://files.pythonhosted.org/packages/69/8f/a1df7bd02d434ab82539517d1b98028985700cfc4300bc5496fb140ca648/numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3", size = 16074900 }, + { url = "https://files.pythonhosted.org/packages/04/94/b419e7a76bf21a00fcb03c613583f10e389fdc8dfe420412ff5710c8ad3d/numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab", size = 15219122 }, + { url = "https://files.pythonhosted.org/packages/65/d9/dddf398b2b6c5d750892a207a469c2854a8db0f033edaf72103af8cf05aa/numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72", size = 17851668 }, + { url = "https://files.pythonhosted.org/packages/d4/dc/09a4e5819a9782a213c0eb4eecacdc1cd75ad8dac99279b04cfccb7eeb0a/numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066", size = 6325288 }, + { url = "https://files.pythonhosted.org/packages/ce/e1/e0d06ec34036c92b43aef206efe99a5f5f04e12c776eab82a36e00c40afc/numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881", size = 12692303 }, ] [[package]] name = "packaging" version = "24.2" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950, upload-time = "2024-11-08T09:47:47.202Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451, upload-time = "2024-11-08T09:47:44.722Z" }, + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, ] [[package]] @@ -730,24 +653,30 @@ dependencies = [ { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213, upload-time = "2024-09-20T13:10:04.827Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893, upload-time = "2024-09-20T13:09:09.655Z" }, - { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475, upload-time = "2024-09-20T13:09:14.718Z" }, - { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645, upload-time = "2024-09-20T19:02:03.88Z" }, - { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445, upload-time = "2024-09-20T13:09:17.621Z" }, - { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235, upload-time = "2024-09-20T19:02:07.094Z" }, - { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756, upload-time = "2024-09-20T13:09:20.474Z" }, - { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248, upload-time = "2024-09-20T13:09:23.137Z" }, + { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 }, + { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 }, + { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 }, + { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 }, + { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 }, + { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 }, + { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 }, + { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 }, + { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 }, + { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 }, + { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 }, + { url = 
"https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 }, + { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 }, ] [[package]] name = "parso" version = "0.8.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609, upload-time = "2024-04-05T09:43:55.897Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650, upload-time = "2024-04-05T09:43:53.299Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 }, ] [[package]] @@ -757,9 +686,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/81/74f6a65b848ffd16c18f920620ce999fe45fe27f01ab3911260ce4ed85e4/patsy-1.0.1.tar.gz", hash = "sha256:e786a9391eec818c054e359b737bbce692f051aee4c661f4141cc88fb459c0c4", size = 396010, upload-time = "2024-11-12T14:10:54.642Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/81/74f6a65b848ffd16c18f920620ce999fe45fe27f01ab3911260ce4ed85e4/patsy-1.0.1.tar.gz", hash = "sha256:e786a9391eec818c054e359b737bbce692f051aee4c661f4141cc88fb459c0c4", size = 396010 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/2b/b50d3d08ea0fc419c183a84210571eba005328efa62b6b98bc28e9ead32a/patsy-1.0.1-py2.py3-none-any.whl", hash = "sha256:751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c", size = 232923, upload-time = "2024-11-12T14:10:52.85Z" }, + { url = "https://files.pythonhosted.org/packages/87/2b/b50d3d08ea0fc419c183a84210571eba005328efa62b6b98bc28e9ead32a/patsy-1.0.1-py2.py3-none-any.whl", hash = "sha256:751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c", size = 232923 }, ] [[package]] @@ -769,64 +698,50 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ptyprocess" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = 
"sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 }, ] [[package]] name = "pillow" -version = "10.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059, upload-time = "2024-07-01T09:48:43.583Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350, upload-time = "2024-07-01T09:46:17.177Z" }, - { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980, upload-time = "2024-07-01T09:46:19.169Z" }, - { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799, upload-time = "2024-07-01T09:46:21.883Z" }, - { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973, upload-time = "2024-07-01T09:46:24.321Z" }, - { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054, upload-time = "2024-07-01T09:46:26.825Z" }, - { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484, upload-time = "2024-07-01T09:46:29.355Z" }, - { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375, upload-time = "2024-07-01T09:46:31.756Z" }, - { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 
4608773, upload-time = "2024-07-01T09:46:33.73Z" }, - { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690, upload-time = "2024-07-01T09:46:36.587Z" }, - { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951, upload-time = "2024-07-01T09:46:38.777Z" }, - { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427, upload-time = "2024-07-01T09:46:43.15Z" }, -] - -[[package]] -name = "pip" -version = "25.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/53/b309b4a497b09655cb7e07088966881a57d082f48ac3cb54ea729fd2c6cf/pip-25.0.1.tar.gz", hash = "sha256:88f96547ea48b940a3a385494e181e29fb8637898f88d88737c5049780f196ea", size = 1950850, upload-time = "2025-02-09T17:14:04.423Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/bc/b7db44f5f39f9d0494071bddae6880eb645970366d0a200022a1a93d57f5/pip-25.0.1-py3-none-any.whl", hash = "sha256:c46efd13b6aa8279f33f2864459c8ce587ea6a1a59ee20de055868d8f7688f7f", size = 1841526, upload-time = "2025-02-09T17:14:01.463Z" }, +version = "11.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/26/0d95c04c868f6bdb0c447e3ee2de5564411845e36a858cfd63766bc7b563/pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739", size = 46737780 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/24/e2e15e392d00fcf4215907465d8ec2a2f23bcec1481a8ebe4ae760459995/pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699", size = 3147300 }, + { url = "https://files.pythonhosted.org/packages/43/72/92ad4afaa2afc233dc44184adff289c2e77e8cd916b3ddb72ac69495bda3/pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38", size = 2978742 }, + { url = "https://files.pythonhosted.org/packages/9e/da/c8d69c5bc85d72a8523fe862f05ababdc52c0a755cfe3d362656bb86552b/pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2", size = 4194349 }, + { url = "https://files.pythonhosted.org/packages/cd/e8/686d0caeed6b998351d57796496a70185376ed9c8ec7d99e1d19ad591fc6/pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2", size = 4298714 }, + { url = "https://files.pythonhosted.org/packages/ec/da/430015cec620d622f06854be67fd2f6721f52fc17fca8ac34b32e2d60739/pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527", size = 4208514 }, + { url = "https://files.pythonhosted.org/packages/44/ae/7e4f6662a9b1cb5f92b9cc9cab8321c381ffbee309210940e57432a4063a/pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa", size = 4380055 }, + { url = "https://files.pythonhosted.org/packages/74/d5/1a807779ac8a0eeed57f2b92a3c32ea1b696e6140c15bd42eaf908a261cd/pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f", size = 4296751 }, + { url = "https://files.pythonhosted.org/packages/38/8c/5fa3385163ee7080bc13026d59656267daaaaf3c728c233d530e2c2757c8/pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb", size = 4430378 }, + { url = "https://files.pythonhosted.org/packages/ca/1d/ad9c14811133977ff87035bf426875b93097fb50af747793f013979facdb/pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798", size = 2249588 }, + { url = "https://files.pythonhosted.org/packages/fb/01/3755ba287dac715e6afdb333cb1f6d69740a7475220b4637b5ce3d78cec2/pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de", size = 2567509 }, + { url = "https://files.pythonhosted.org/packages/c0/98/2c7d727079b6be1aba82d195767d35fcc2d32204c7a5820f822df5330152/pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84", size = 2254791 }, + { url = "https://files.pythonhosted.org/packages/eb/38/998b04cc6f474e78b563716b20eecf42a2fa16a84589d23c8898e64b0ffd/pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b", size = 3150854 }, + { url = "https://files.pythonhosted.org/packages/13/8e/be23a96292113c6cb26b2aa3c8b3681ec62b44ed5c2bd0b258bd59503d3c/pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003", size = 2982369 }, + { url = "https://files.pythonhosted.org/packages/97/8a/3db4eaabb7a2ae8203cd3a332a005e4aba00067fc514aaaf3e9721be31f1/pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2", size = 4333703 }, + { url = "https://files.pythonhosted.org/packages/28/ac/629ffc84ff67b9228fe87a97272ab125bbd4dc462745f35f192d37b822f1/pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a", size = 4412550 }, + { url = "https://files.pythonhosted.org/packages/d6/07/a505921d36bb2df6868806eaf56ef58699c16c388e378b0dcdb6e5b2fb36/pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8", size = 4461038 }, + { url = "https://files.pythonhosted.org/packages/d6/b9/fb620dd47fc7cc9678af8f8bd8c772034ca4977237049287e99dda360b66/pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8", size = 2253197 }, + { url = "https://files.pythonhosted.org/packages/df/86/25dde85c06c89d7fc5db17940f07aae0a56ac69aa9ccb5eb0f09798862a8/pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904", size = 2572169 }, + { url = "https://files.pythonhosted.org/packages/51/85/9c33f2517add612e17f3381aee7c4072779130c634921a756c97bc29fb49/pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3", 
size = 2256828 }, ] [[package]] name = "platformdirs" version = "4.3.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302, upload-time = "2024-09-17T19:06:50.688Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439, upload-time = "2024-09-17T19:06:49.212Z" }, -] - -[[package]] -name = "plotly" -version = "6.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "narwhals" }, - { name = "packaging" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c7/cc/e41b5f697ae403f0b50e47b7af2e36642a193085f553bf7cc1169362873a/plotly-6.0.1.tar.gz", hash = "sha256:dd8400229872b6e3c964b099be699f8d00c489a974f2cfccfad5e8240873366b", size = 8094643, upload-time = "2025-03-17T15:02:23.994Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/65/ad2bc85f7377f5cfba5d4466d5474423a3fb7f6a97fd807c06f92dd3e721/plotly-6.0.1-py3-none-any.whl", hash = "sha256:4714db20fea57a435692c548a4eb4fae454f7daddf15f8d8ba7e1045681d7768", size = 14805757, upload-time = "2025-03-17T15:02:18.73Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, ] [[package]] name = "plotnine" -version = "0.14.5" +version = "0.14.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "matplotlib" }, @@ -836,18 +751,18 @@ dependencies = [ { name = "scipy" }, { name = "statsmodels" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5d/0e/618bfa724ad19418c83eb22cdc4332dc69bb67f47094bd013ffe15e188d2/plotnine-0.14.5.tar.gz", hash = "sha256:9e75969e8e10d8d770a4be36d10e075cc10b88ca6fcc99e36ada53436fb5653f", size = 6424617, upload-time = "2025-01-02T11:06:07.338Z" } +sdist = { url = "https://files.pythonhosted.org/packages/47/f6/1d9ad7dd5804202a43d5a492880bd9d95eeea9d73538163607182cea67db/plotnine-0.14.3.tar.gz", hash = "sha256:e0834d7752bb9c701071cecb4a65a6009d86b86b6e276320c62a23665e289b56", size = 6413160 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/c5/7cfda7ba9fa02243367fbfb4880b6de8039266f22c47c2dbbd39b6adc46f/plotnine-0.14.5-py3-none-any.whl", hash = "sha256:4a8bc4360732dd69a0263def4abab285ed8f0f4386186f1e44c642f2cea79b88", size = 1301197, upload-time = "2025-01-02T11:06:03.686Z" }, + { url = "https://files.pythonhosted.org/packages/f9/19/e5ee695f2fed60ea85800217f8106890b02a315dd910be9ac35244968804/plotnine-0.14.3-py3-none-any.whl", hash = "sha256:9039b2d675484c3bfe76d5dc44b49d1654571e7ed14650993394db706efc5635", size = 1300567 }, ] [[package]] name = "pluggy" version = "1.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" }, + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, ] [[package]] @@ -857,148 +772,115 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/feb5e137aff82f7c7f3248267b97451da3644f6cdc218edfe549fb354127/prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90", size = 424684, upload-time = "2024-09-25T10:20:57.609Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/feb5e137aff82f7c7f3248267b97451da3644f6cdc218edfe549fb354127/prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90", size = 424684 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595, upload-time = "2024-09-25T10:20:53.932Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595 }, ] [[package]] name = "propcache" version = "0.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/c8/2a13f78d82211490855b2fb303b6721348d0787fdd9a12ac46d99d3acde1/propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64", size = 41735, upload-time = "2024-12-01T18:29:16.437Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/c8/2a13f78d82211490855b2fb303b6721348d0787fdd9a12ac46d99d3acde1/propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64", size = 41735 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/28/1d205fe49be8b1b4df4c50024e62480a442b1a7b818e734308bb0d17e7fb/propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a", size = 79588, upload-time = "2024-12-01T18:28:03.327Z" }, - { url = "https://files.pythonhosted.org/packages/21/ee/fc4d893f8d81cd4971affef2a6cb542b36617cd1d8ce56b406112cb80bf7/propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0", size = 45825, upload-time 
= "2024-12-01T18:28:06.78Z" }, - { url = "https://files.pythonhosted.org/packages/4a/de/bbe712f94d088da1d237c35d735f675e494a816fd6f54e9db2f61ef4d03f/propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d", size = 45357, upload-time = "2024-12-01T18:28:08.575Z" }, - { url = "https://files.pythonhosted.org/packages/7f/14/7ae06a6cf2a2f1cb382586d5a99efe66b0b3d0c6f9ac2f759e6f7af9d7cf/propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4", size = 241869, upload-time = "2024-12-01T18:28:10.396Z" }, - { url = "https://files.pythonhosted.org/packages/cc/59/227a78be960b54a41124e639e2c39e8807ac0c751c735a900e21315f8c2b/propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d", size = 247884, upload-time = "2024-12-01T18:28:11.746Z" }, - { url = "https://files.pythonhosted.org/packages/84/58/f62b4ffaedf88dc1b17f04d57d8536601e4e030feb26617228ef930c3279/propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5", size = 248486, upload-time = "2024-12-01T18:28:13.048Z" }, - { url = "https://files.pythonhosted.org/packages/1c/07/ebe102777a830bca91bbb93e3479cd34c2ca5d0361b83be9dbd93104865e/propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24", size = 243649, upload-time = "2024-12-01T18:28:14.297Z" }, - { url = "https://files.pythonhosted.org/packages/ed/bc/4f7aba7f08f520376c4bb6a20b9a981a581b7f2e385fa0ec9f789bb2d362/propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff", size = 229103, upload-time = "2024-12-01T18:28:15.913Z" }, - { url = "https://files.pythonhosted.org/packages/fe/d5/04ac9cd4e51a57a96f78795e03c5a0ddb8f23ec098b86f92de028d7f2a6b/propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f", size = 226607, upload-time = "2024-12-01T18:28:18.015Z" }, - { url = "https://files.pythonhosted.org/packages/e3/f0/24060d959ea41d7a7cc7fdbf68b31852331aabda914a0c63bdb0e22e96d6/propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec", size = 221153, upload-time = "2024-12-01T18:28:19.937Z" }, - { url = "https://files.pythonhosted.org/packages/77/a7/3ac76045a077b3e4de4859a0753010765e45749bdf53bd02bc4d372da1a0/propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348", size = 222151, upload-time = "2024-12-01T18:28:21.186Z" }, - { url = "https://files.pythonhosted.org/packages/e7/af/5e29da6f80cebab3f5a4dcd2a3240e7f56f2c4abf51cbfcc99be34e17f0b/propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6", size = 233812, upload-time = "2024-12-01T18:28:22.816Z" }, - { url = "https://files.pythonhosted.org/packages/8c/89/ebe3ad52642cc5509eaa453e9f4b94b374d81bae3265c59d5c2d98efa1b4/propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6", size = 238829, upload-time = "2024-12-01T18:28:24.071Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2f/6b32f273fa02e978b7577159eae7471b3cfb88b48563b1c2578b2d7ca0bb/propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518", size = 230704, upload-time = "2024-12-01T18:28:25.314Z" }, - { url = "https://files.pythonhosted.org/packages/5c/2e/f40ae6ff5624a5f77edd7b8359b208b5455ea113f68309e2b00a2e1426b6/propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246", size = 40050, upload-time = "2024-12-01T18:28:26.617Z" }, - { url = "https://files.pythonhosted.org/packages/3b/77/a92c3ef994e47180862b9d7d11e37624fb1c00a16d61faf55115d970628b/propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1", size = 44117, upload-time = "2024-12-01T18:28:27.643Z" }, - { url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818, upload-time = "2024-12-01T18:29:14.716Z" }, + { url = "https://files.pythonhosted.org/packages/0f/2a/329e0547cf2def8857157f9477669043e75524cc3e6251cef332b3ff256f/propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc", size = 77002 }, + { url = "https://files.pythonhosted.org/packages/12/2d/c4df5415e2382f840dc2ecbca0eeb2293024bc28e57a80392f2012b4708c/propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9", size = 44639 }, + { url = "https://files.pythonhosted.org/packages/d0/5a/21aaa4ea2f326edaa4e240959ac8b8386ea31dedfdaa636a3544d9e7a408/propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439", size = 44049 }, + { url = "https://files.pythonhosted.org/packages/4e/3e/021b6cd86c0acc90d74784ccbb66808b0bd36067a1bf3e2deb0f3845f618/propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536", size = 224819 }, + { url = "https://files.pythonhosted.org/packages/3c/57/c2fdeed1b3b8918b1770a133ba5c43ad3d78e18285b0c06364861ef5cc38/propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629", size = 229625 }, + { url = "https://files.pythonhosted.org/packages/9d/81/70d4ff57bf2877b5780b466471bebf5892f851a7e2ca0ae7ffd728220281/propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b", size = 232934 }, + { url = "https://files.pythonhosted.org/packages/3c/b9/bb51ea95d73b3fb4100cb95adbd4e1acaf2cbb1fd1083f5468eeb4a099a8/propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052", size = 227361 }, + { url = 
"https://files.pythonhosted.org/packages/f1/20/3c6d696cd6fd70b29445960cc803b1851a1131e7a2e4ee261ee48e002bcd/propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce", size = 213904 }, + { url = "https://files.pythonhosted.org/packages/a1/cb/1593bfc5ac6d40c010fa823f128056d6bc25b667f5393781e37d62f12005/propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d", size = 212632 }, + { url = "https://files.pythonhosted.org/packages/6d/5c/e95617e222be14a34c709442a0ec179f3207f8a2b900273720501a70ec5e/propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce", size = 207897 }, + { url = "https://files.pythonhosted.org/packages/8e/3b/56c5ab3dc00f6375fbcdeefdede5adf9bee94f1fab04adc8db118f0f9e25/propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95", size = 208118 }, + { url = "https://files.pythonhosted.org/packages/86/25/d7ef738323fbc6ebcbce33eb2a19c5e07a89a3df2fded206065bd5e868a9/propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf", size = 217851 }, + { url = "https://files.pythonhosted.org/packages/b3/77/763e6cef1852cf1ba740590364ec50309b89d1c818e3256d3929eb92fabf/propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f", size = 222630 }, + { url = "https://files.pythonhosted.org/packages/4f/e9/0f86be33602089c701696fbed8d8c4c07b6ee9605c5b7536fd27ed540c5b/propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30", size = 216269 }, + { url = "https://files.pythonhosted.org/packages/cc/02/5ac83217d522394b6a2e81a2e888167e7ca629ef6569a3f09852d6dcb01a/propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6", size = 39472 }, + { url = "https://files.pythonhosted.org/packages/f4/33/d6f5420252a36034bc8a3a01171bc55b4bff5df50d1c63d9caa50693662f/propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1", size = 43363 }, + { url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818 }, ] [[package]] name = "psutil" -version = "6.1.1" +version = "6.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1f/5a/07871137bb752428aa4b659f910b399ba6f291156bdea939be3e96cae7cb/psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5", size = 508502, upload-time = "2024-12-19T18:21:20.568Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/99/ca79d302be46f7bdd8321089762dd4476ee725fce16fc2b2e1dbba8cac17/psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8", size = 247511, upload-time = "2024-12-19T18:21:45.163Z" }, - { url = "https://files.pythonhosted.org/packages/0b/6b/73dbde0dd38f3782905d4587049b9be64d76671042fdcaf60e2430c6796d/psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377", size = 248985, upload-time = "2024-12-19T18:21:49.254Z" }, - { url = "https://files.pythonhosted.org/packages/17/38/c319d31a1d3f88c5b79c68b3116c129e5133f1822157dd6da34043e32ed6/psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003", size = 284488, upload-time = "2024-12-19T18:21:51.638Z" }, - { url = "https://files.pythonhosted.org/packages/9c/39/0f88a830a1c8a3aba27fededc642da37613c57cbff143412e3536f89784f/psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160", size = 287477, upload-time = "2024-12-19T18:21:55.306Z" }, - { url = "https://files.pythonhosted.org/packages/47/da/99f4345d4ddf2845cb5b5bd0d93d554e84542d116934fde07a0c50bd4e9f/psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3", size = 289017, upload-time = "2024-12-19T18:21:57.875Z" }, - { url = "https://files.pythonhosted.org/packages/38/53/bd755c2896f4461fd4f36fa6a6dcb66a88a9e4b9fd4e5b66a77cf9d4a584/psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53", size = 250602, upload-time = "2024-12-19T18:22:08.808Z" }, - { url = "https://files.pythonhosted.org/packages/7b/d7/7831438e6c3ebbfa6e01a927127a6cb42ad3ab844247f3c5b96bea25d73d/psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649", size = 254444, upload-time = "2024-12-19T18:22:11.335Z" }, + { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 }, + { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 }, + { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 }, + { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 }, + { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 }, + { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 }, + { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 }, ] [[package]] name = "ptyprocess" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 }, ] [[package]] name = "pure-eval" version = "0.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 }, ] [[package]] name = "pyarrow" version = "18.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/7b/640785a9062bb00314caa8a387abce547d2a420cf09bd6c715fe659ccffb/pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73", size = 1118671, upload-time = "2024-11-26T02:01:48.62Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/7b/640785a9062bb00314caa8a387abce547d2a420cf09bd6c715fe659ccffb/pyarrow-18.1.0.tar.gz", hash = 
"sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73", size = 1118671 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/50/12829e7111b932581e51dda51d5cb39207a056c30fe31ef43f14c63c4d7e/pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d", size = 29514620, upload-time = "2024-11-26T01:59:39.797Z" }, - { url = "https://files.pythonhosted.org/packages/d1/41/468c944eab157702e96abab3d07b48b8424927d4933541ab43788bb6964d/pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee", size = 30856494, upload-time = "2024-11-26T01:59:44.725Z" }, - { url = "https://files.pythonhosted.org/packages/68/f9/29fb659b390312a7345aeb858a9d9c157552a8852522f2c8bad437c29c0a/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992", size = 39203624, upload-time = "2024-11-26T01:59:49.189Z" }, - { url = "https://files.pythonhosted.org/packages/6e/f6/19360dae44200e35753c5c2889dc478154cd78e61b1f738514c9f131734d/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54", size = 40139341, upload-time = "2024-11-26T01:59:54.849Z" }, - { url = "https://files.pythonhosted.org/packages/bb/e6/9b3afbbcf10cc724312e824af94a2e993d8ace22994d823f5c35324cebf5/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33", size = 38618629, upload-time = "2024-11-26T01:59:59.966Z" }, - { url = "https://files.pythonhosted.org/packages/3a/2e/3b99f8a3d9e0ccae0e961978a0d0089b25fb46ebbcfb5ebae3cca179a5b3/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30", size = 40078661, upload-time = "2024-11-26T02:00:04.55Z" }, - { url = "https://files.pythonhosted.org/packages/76/52/f8da04195000099d394012b8d42c503d7041b79f778d854f410e5f05049a/pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99", size = 25092330, upload-time = "2024-11-26T02:00:09.576Z" }, + { url = "https://files.pythonhosted.org/packages/cb/87/aa4d249732edef6ad88899399047d7e49311a55749d3c373007d034ee471/pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b", size = 29497406 }, + { url = "https://files.pythonhosted.org/packages/3c/c7/ed6adb46d93a3177540e228b5ca30d99fc8ea3b13bdb88b6f8b6467e2cb7/pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2", size = 30835095 }, + { url = "https://files.pythonhosted.org/packages/41/d7/ed85001edfb96200ff606943cff71d64f91926ab42828676c0fc0db98963/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191", size = 39194527 }, + { url = "https://files.pythonhosted.org/packages/59/16/35e28eab126342fa391593415d79477e89582de411bb95232f28b131a769/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa", size = 40131443 }, + { url = 
"https://files.pythonhosted.org/packages/0c/95/e855880614c8da20f4cd74fa85d7268c725cf0013dc754048593a38896a0/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c", size = 38608750 }, + { url = "https://files.pythonhosted.org/packages/54/9d/f253554b1457d4fdb3831b7bd5f8f00f1795585a606eabf6fec0a58a9c38/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c", size = 40066690 }, + { url = "https://files.pythonhosted.org/packages/2f/58/8912a2563e6b8273e8aa7b605a345bba5a06204549826f6493065575ebc0/pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181", size = 25081054 }, + { url = "https://files.pythonhosted.org/packages/82/f9/d06ddc06cab1ada0c2f2fd205ac8c25c2701182de1b9c4bf7a0a44844431/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc", size = 29525542 }, + { url = "https://files.pythonhosted.org/packages/ab/94/8917e3b961810587ecbdaa417f8ebac0abb25105ae667b7aa11c05876976/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386", size = 30829412 }, + { url = "https://files.pythonhosted.org/packages/5e/e3/3b16c3190f3d71d3b10f6758d2d5f7779ef008c4fd367cedab3ed178a9f7/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324", size = 39119106 }, + { url = "https://files.pythonhosted.org/packages/1d/d6/5d704b0d25c3c79532f8c0639f253ec2803b897100f64bcb3f53ced236e5/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8", size = 40090940 }, + { url = "https://files.pythonhosted.org/packages/37/29/366bc7e588220d74ec00e497ac6710c2833c9176f0372fe0286929b2d64c/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9", size = 38548177 }, + { url = "https://files.pythonhosted.org/packages/c8/11/fabf6ecabb1fe5b7d96889228ca2a9158c4c3bb732e3b8ee3f7f6d40b703/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba", size = 40043567 }, ] [[package]] name = "pycparser" version = "2.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, -] - -[[package]] -name = "pydantic" -version = "2.10.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ 
- { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/70/7e/fb60e6fee04d0ef8f15e4e01ff187a196fa976eb0f0ab524af4599e5754c/pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06", size = 762094, upload-time = "2024-12-18T17:09:24.84Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/26/3e1bbe954fde7ee22a6e7d31582c642aad9e84ffe4b5fb61e63b87cd326f/pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d", size = 431765, upload-time = "2024-12-18T17:09:21.953Z" }, -] - -[[package]] -name = "pydantic-core" -version = "2.27.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443, upload-time = "2024-12-18T11:31:54.917Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127, upload-time = "2024-12-18T11:28:30.346Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340, upload-time = "2024-12-18T11:28:32.521Z" }, - { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900, upload-time = "2024-12-18T11:28:34.507Z" }, - { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177, upload-time = "2024-12-18T11:28:36.488Z" }, - { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046, upload-time = "2024-12-18T11:28:39.409Z" }, - { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386, upload-time = "2024-12-18T11:28:41.221Z" }, - { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060, upload-time = "2024-12-18T11:28:44.709Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870, upload-time = "2024-12-18T11:28:46.839Z" }, - { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822, upload-time = "2024-12-18T11:28:48.896Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364, upload-time = "2024-12-18T11:28:50.755Z" }, - { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303, upload-time = "2024-12-18T11:28:54.122Z" }, - { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064, upload-time = "2024-12-18T11:28:56.074Z" }, - { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046, upload-time = "2024-12-18T11:28:58.107Z" }, - { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092, upload-time = "2024-12-18T11:29:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, ] [[package]] name = "pygments" version = "2.18.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905, upload-time = "2024-05-04T13:42:02.013Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513, upload-time = "2024-05-04T13:41:57.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, ] [[package]] name = "pyparsing" -version = "3.2.1" +version = "3.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/1a/3544f4f299a47911c2ab3710f534e52fea62a633c96806995da5d25be4b2/pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a", size = 1067694, upload-time = "2024-12-31T20:59:46.157Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/d5/e5aeee5387091148a19e1145f63606619cb5f20b83fccb63efae6474e7b2/pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c", size = 920984 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/a7/c8a2d361bf89c0d9577c934ebb7421b25dc84bf3a8e3ac0a40aed9acc547/pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1", size = 107716, upload-time = "2024-12-31T20:59:42.738Z" }, + { url = "https://files.pythonhosted.org/packages/be/ec/2eb3cd785efd67806c46c13a17339708ddc346cbb684eade7a6e6f79536a/pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84", size = 106921 }, ] [[package]] @@ -1011,9 +893,9 @@ dependencies = [ { name = "packaging" }, { name = "pluggy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919, upload-time = "2024-12-01T12:54:25.98Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083, upload-time = "2024-12-01T12:54:19.735Z" }, + { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, ] [[package]] @@ -1023,45 +905,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, ] [[package]] name = "pytz" version = "2024.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692, upload-time = "2024-09-11T02:24:47.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002, upload-time = "2024-09-11T02:24:45.8Z" }, + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, ] [[package]] name = "pywin32" -version = "307" +version = "308" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/90/4e/9c660fa6c34db3c9542c9682b0ccd9edd63a6a4cb6ac4d22014b2c3355c9/pywin32-307-cp312-cp312-win32.whl", hash = "sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815", size = 5916997, upload-time = "2024-10-04T19:58:32.086Z" }, - { url = "https://files.pythonhosted.org/packages/9c/11/c56e771d2cdbd2dac8e656edb2c814e4b2239da2c9028aa7265cdfff8aed/pywin32-307-cp312-cp312-win_amd64.whl", hash = "sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347", size = 6519708, upload-time = "2024-10-04T19:58:34.597Z" }, - { url = "https://files.pythonhosted.org/packages/cd/64/53b1112cb05f85a6c87339a9f90a3b82d67ecb46f16b45abaac3bf4dee2b/pywin32-307-cp312-cp312-win_arm64.whl", hash = "sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2", size = 7952978, upload-time = "2024-10-04T19:58:36.518Z" }, + { url = "https://files.pythonhosted.org/packages/a9/a4/aa562d8935e3df5e49c161b427a3a2efad2ed4e9cf81c3de636f1fdddfd0/pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed", size = 5938579 }, + { url = "https://files.pythonhosted.org/packages/c7/50/b0efb8bb66210da67a53ab95fd7a98826a97ee21f1d22949863e6d588b22/pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4", size = 6542056 }, + { url = "https://files.pythonhosted.org/packages/26/df/2b63e3e4f2df0224f8aaf6d131f54fe4e8c96400eb9df563e2aae2e1a1f9/pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd", size = 7974986 }, ] [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, ] [[package]] @@ -1071,57 +953,29 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "implementation_name == 'pypy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/05/bed626b9f7bb2322cdbbf7b4bd8f54b1b617b0d2ab2d3547d6e39428a48e/pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f", size = 271975, upload-time = "2024-08-22T09:02:03.351Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/28/2f/78a766c8913ad62b28581777ac4ede50c6d9f249d39c2963e279524a1bbe/pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9", size = 1343105, upload-time = "2024-08-22T08:59:53.18Z" }, - { url = "https://files.pythonhosted.org/packages/b7/9c/4b1e2d3d4065be715e007fe063ec7885978fad285f87eae1436e6c3201f4/pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52", size = 1008365, upload-time = "2024-08-22T08:59:54.4Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/ef/5a23ec689ff36d7625b38d121ef15abfc3631a9aecb417baf7a4245e4124/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08", size = 665923, upload-time = "2024-08-22T08:59:55.568Z" }, - { url = "https://files.pythonhosted.org/packages/ae/61/d436461a47437d63c6302c90724cf0981883ec57ceb6073873f32172d676/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5", size = 903400, upload-time = "2024-08-22T08:59:57.001Z" }, - { url = "https://files.pythonhosted.org/packages/47/42/fc6d35ecefe1739a819afaf6f8e686f7f02a4dd241c78972d316f403474c/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae", size = 860034, upload-time = "2024-08-22T08:59:58.259Z" }, - { url = "https://files.pythonhosted.org/packages/07/3b/44ea6266a6761e9eefaa37d98fabefa112328808ac41aa87b4bbb668af30/pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711", size = 860579, upload-time = "2024-08-22T08:59:59.514Z" }, - { url = "https://files.pythonhosted.org/packages/38/6f/4df2014ab553a6052b0e551b37da55166991510f9e1002c89cab7ce3b3f2/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6", size = 1196246, upload-time = "2024-08-22T09:00:01.117Z" }, - { url = "https://files.pythonhosted.org/packages/38/9d/ee240fc0c9fe9817f0c9127a43238a3e28048795483c403cc10720ddef22/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3", size = 1507441, upload-time = "2024-08-22T09:00:02.851Z" }, - { url = "https://files.pythonhosted.org/packages/85/4f/01711edaa58d535eac4a26c294c617c9a01f09857c0ce191fd574d06f359/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b", size = 1406498, upload-time = "2024-08-22T09:00:04.907Z" }, - { url = "https://files.pythonhosted.org/packages/07/18/907134c85c7152f679ed744e73e645b365f3ad571f38bdb62e36f347699a/pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7", size = 575533, upload-time = "2024-08-22T09:00:06.326Z" }, - { url = "https://files.pythonhosted.org/packages/ce/2c/a6f4a20202a4d3c582ad93f95ee78d79bbdc26803495aec2912b17dbbb6c/pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a", size = 637768, upload-time = "2024-08-22T09:00:08.137Z" }, - { url = "https://files.pythonhosted.org/packages/5f/0e/eb16ff731632d30554bf5af4dbba3ffcd04518219d82028aea4ae1b02ca5/pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b", size = 540675, upload-time = "2024-08-22T09:00:09.479Z" }, -] - -[[package]] -name = "referencing" -version = "0.36.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "rpds-py" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = 
"sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, -] - -[[package]] -name = "regex" -version = "2024.11.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781, upload-time = "2024-11-06T20:10:07.07Z" }, - { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455, upload-time = "2024-11-06T20:10:09.117Z" }, - { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759, upload-time = "2024-11-06T20:10:11.155Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976, upload-time = "2024-11-06T20:10:13.24Z" }, - { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077, upload-time = "2024-11-06T20:10:15.37Z" }, - { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160, upload-time = "2024-11-06T20:10:19.027Z" }, - { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896, upload-time = "2024-11-06T20:10:21.85Z" }, - { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997, upload-time = "2024-11-06T20:10:24.329Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725, upload-time = "2024-11-06T20:10:28.067Z" }, - { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481, upload-time = "2024-11-06T20:10:31.612Z" }, - { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896, upload-time = "2024-11-06T20:10:34.054Z" }, - { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138, upload-time = "2024-11-06T20:10:36.142Z" }, - { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692, upload-time = "2024-11-06T20:10:38.394Z" }, - { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135, upload-time = "2024-11-06T20:10:40.367Z" }, - { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567, upload-time = "2024-11-06T20:10:43.467Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/fd/05/bed626b9f7bb2322cdbbf7b4bd8f54b1b617b0d2ab2d3547d6e39428a48e/pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f", size = 271975 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/a7/0f7e2f6c126fe6e62dbae0bc93b1bd3f1099cf7fea47a5468defebe3f39d/pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726", size = 1006564 }, + { url = "https://files.pythonhosted.org/packages/31/b6/a187165c852c5d49f826a690857684333a6a4a065af0a6015572d2284f6a/pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3", size = 1340447 }, + { url = "https://files.pythonhosted.org/packages/68/ba/f4280c58ff71f321602a6e24fd19879b7e79793fb8ab14027027c0fb58ef/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50", size = 665485 }, + { url = "https://files.pythonhosted.org/packages/77/b5/c987a5c53c7d8704216f29fc3d810b32f156bcea488a940e330e1bcbb88d/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb", size = 903484 }, + { url = 
"https://files.pythonhosted.org/packages/29/c9/07da157d2db18c72a7eccef8e684cefc155b712a88e3d479d930aa9eceba/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187", size = 859981 }, + { url = "https://files.pythonhosted.org/packages/43/09/e12501bd0b8394b7d02c41efd35c537a1988da67fc9c745cae9c6c776d31/pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b", size = 860334 }, + { url = "https://files.pythonhosted.org/packages/eb/ff/f5ec1d455f8f7385cc0a8b2acd8c807d7fade875c14c44b85c1bddabae21/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18", size = 1196179 }, + { url = "https://files.pythonhosted.org/packages/ec/8a/bb2ac43295b1950fe436a81fc5b298be0b96ac76fb029b514d3ed58f7b27/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115", size = 1507668 }, + { url = "https://files.pythonhosted.org/packages/a9/49/dbc284ebcfd2dca23f6349227ff1616a7ee2c4a35fe0a5d6c3deff2b4fed/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e", size = 1406539 }, + { url = "https://files.pythonhosted.org/packages/00/68/093cdce3fe31e30a341d8e52a1ad86392e13c57970d722c1f62a1d1a54b6/pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5", size = 575567 }, + { url = "https://files.pythonhosted.org/packages/92/ae/6cc4657148143412b5819b05e362ae7dd09fb9fe76e2a539dcff3d0386bc/pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad", size = 637551 }, + { url = "https://files.pythonhosted.org/packages/6c/67/fbff102e201688f97c8092e4c3445d1c1068c2f27bbd45a578df97ed5f94/pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797", size = 540378 }, + { url = "https://files.pythonhosted.org/packages/3f/fe/2d998380b6e0122c6c4bdf9b6caf490831e5f5e2d08a203b5adff060c226/pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a", size = 1007378 }, + { url = "https://files.pythonhosted.org/packages/4a/f4/30d6e7157f12b3a0390bde94d6a8567cdb88846ed068a6e17238a4ccf600/pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc", size = 1329532 }, + { url = "https://files.pythonhosted.org/packages/82/86/3fe917870e15ee1c3ad48229a2a64458e36036e64b4afa9659045d82bfa8/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5", size = 653242 }, + { url = "https://files.pythonhosted.org/packages/50/2d/242e7e6ef6c8c19e6cb52d095834508cd581ffb925699fd3c640cdc758f1/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672", size = 888404 }, + { url = "https://files.pythonhosted.org/packages/ac/11/7270566e1f31e4ea73c81ec821a4b1688fd551009a3d2bab11ec66cb1e8f/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797", size = 845858 }, + { 
url = "https://files.pythonhosted.org/packages/91/d5/72b38fbc69867795c8711bdd735312f9fef1e3d9204e2f63ab57085434b9/pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386", size = 847375 }, + { url = "https://files.pythonhosted.org/packages/dd/9a/10ed3c7f72b4c24e719c59359fbadd1a27556a28b36cdf1cd9e4fb7845d5/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306", size = 1183489 }, + { url = "https://files.pythonhosted.org/packages/72/2d/8660892543fabf1fe41861efa222455811adac9f3c0818d6c3170a1153e3/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6", size = 1492932 }, + { url = "https://files.pythonhosted.org/packages/7b/d6/32fd69744afb53995619bc5effa2a405ae0d343cd3e747d0fbc43fe894ee/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0", size = 1392485 }, ] [[package]] @@ -1134,105 +988,51 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, ] [[package]] -name = "rpds-py" -version = "0.24.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/b3/52b213298a0ba7097c7ea96bee95e1947aa84cc816d48cebb539770cdf41/rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e", size = 26863, upload-time = "2025-03-26T14:56:01.518Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/e0/1c55f4a3be5f1ca1a4fd1f3ff1504a1478c1ed48d84de24574c4fa87e921/rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205", size = 366945, upload-time = "2025-03-26T14:53:28.149Z" }, - { url = "https://files.pythonhosted.org/packages/39/1b/a3501574fbf29118164314dbc800d568b8c1c7b3258b505360e8abb3902c/rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7", size = 351935, upload-time = "2025-03-26T14:53:29.684Z" }, - { url = "https://files.pythonhosted.org/packages/dc/47/77d3d71c55f6a374edde29f1aca0b2e547325ed00a9da820cabbc9497d2b/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9", size = 390817, upload-time = "2025-03-26T14:53:31.177Z" }, - { url = "https://files.pythonhosted.org/packages/4e/ec/1e336ee27484379e19c7f9cc170f4217c608aee406d3ae3a2e45336bff36/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e", size = 401983, upload-time = "2025-03-26T14:53:33.163Z" }, - { url = "https://files.pythonhosted.org/packages/07/f8/39b65cbc272c635eaea6d393c2ad1ccc81c39eca2db6723a0ca4b2108fce/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda", size = 451719, upload-time = "2025-03-26T14:53:34.721Z" }, - { url = "https://files.pythonhosted.org/packages/32/05/05c2b27dd9c30432f31738afed0300659cb9415db0ff7429b05dfb09bbde/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e", size = 442546, upload-time = "2025-03-26T14:53:36.26Z" }, - { url = "https://files.pythonhosted.org/packages/7d/e0/19383c8b5d509bd741532a47821c3e96acf4543d0832beba41b4434bcc49/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029", size = 393695, upload-time = "2025-03-26T14:53:37.728Z" }, - { url = "https://files.pythonhosted.org/packages/9d/15/39f14e96d94981d0275715ae8ea564772237f3fa89bc3c21e24de934f2c7/rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9", size = 427218, upload-time = "2025-03-26T14:53:39.326Z" }, - { url = "https://files.pythonhosted.org/packages/22/b9/12da7124905a680f690da7a9de6f11de770b5e359f5649972f7181c8bf51/rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7", size = 568062, upload-time = "2025-03-26T14:53:40.885Z" }, - { url = "https://files.pythonhosted.org/packages/88/17/75229017a2143d915f6f803721a6d721eca24f2659c5718a538afa276b4f/rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91", size = 596262, upload-time = "2025-03-26T14:53:42.544Z" }, - { url = "https://files.pythonhosted.org/packages/aa/64/8e8a1d8bd1b6b638d6acb6d41ab2cec7f2067a5b8b4c9175703875159a7c/rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56", size = 564306, upload-time = "2025-03-26T14:53:44.2Z" }, - { url = "https://files.pythonhosted.org/packages/68/1c/a7eac8d8ed8cb234a9b1064647824c387753343c3fab6ed7c83481ed0be7/rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30", size = 224281, upload-time = "2025-03-26T14:53:45.769Z" }, - { url = "https://files.pythonhosted.org/packages/bb/46/b8b5424d1d21f2f2f3f2d468660085318d4f74a8df8289e3dd6ad224d488/rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034", size = 239719, upload-time = "2025-03-26T14:53:47.187Z" }, -] - -[[package]] -name = "ruff" -version = "0.8.5" +name = "scipy" +version = "1.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/25/5d/4b5403f3e89837decfd54c51bea7f94b7d3fae77e08858603d0e04d7ad17/ruff-0.8.5.tar.gz", hash = "sha256:1098d36f69831f7ff2a1da3e6407d5fbd6dfa2559e4f74ff2d260c5588900317", size = 3454835, upload-time = "2025-01-02T12:04:16.105Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/73/f8/03391745a703ce11678eb37c48ae89ec60396ea821e9d0bcea7c8e88fd91/ruff-0.8.5-py3-none-linux_armv6l.whl", hash = "sha256:5ad11a5e3868a73ca1fa4727fe7e33735ea78b416313f4368c504dbeb69c0f88", size = 10626889, upload-time = "2025-01-02T12:03:14.406Z" }, - { url = "https://files.pythonhosted.org/packages/55/74/83bb74a44183b904216f3edfb9995b89830c83aaa6ce84627f74da0e0cf8/ruff-0.8.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f69ab37771ea7e0715fead8624ec42996d101269a96e31f4d31be6fc33aa19b7", size = 10398233, upload-time = "2025-01-02T12:03:18.107Z" }, - { url = "https://files.pythonhosted.org/packages/e8/7a/a162a4feb3ef85d594527165e366dde09d7a1e534186ff4ba5d127eda850/ruff-0.8.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b5462d7804558ccff9c08fe8cbf6c14b7efe67404316696a2dde48297b1925bb", size = 10001843, upload-time = "2025-01-02T12:03:22.265Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9f/5ee5dcd135411402e35b6ec6a8dfdadbd31c5cd1c36a624d356a38d76090/ruff-0.8.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d56de7220a35607f9fe59f8a6d018e14504f7b71d784d980835e20fc0611cd50", size = 10872507, upload-time = "2025-01-02T12:03:25.198Z" }, - { url = "https://files.pythonhosted.org/packages/b6/67/db2df2dd4a34b602d7f6ebb1b3744c8157f0d3579973ffc58309c9c272e8/ruff-0.8.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9d99cf80b0429cbebf31cbbf6f24f05a29706f0437c40413d950e67e2d4faca4", size = 10377200, upload-time = "2025-01-02T12:03:29.499Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ff/fe3a6a73006bced73e60d171d154a82430f61d97e787f511a24bd6302611/ruff-0.8.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b75ac29715ac60d554a049dbb0ef3b55259076181c3369d79466cb130eb5afd", size = 11433155, upload-time = "2025-01-02T12:03:33.293Z" }, - { url = "https://files.pythonhosted.org/packages/e3/95/c1d1a1fe36658c1f3e1b47e1cd5f688b72d5786695b9e621c2c38399a95e/ruff-0.8.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c9d526a62c9eda211b38463528768fd0ada25dad524cb33c0e99fcff1c67b5dc", size = 12139227, upload-time = "2025-01-02T12:03:36.318Z" }, - { url = "https://files.pythonhosted.org/packages/1b/fe/644b70d473a27b5112ac7a3428edcc1ce0db775c301ff11aa146f71886e0/ruff-0.8.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:587c5e95007612c26509f30acc506c874dab4c4abbacd0357400bd1aa799931b", size = 11697941, upload-time = "2025-01-02T12:03:40.544Z" }, - { url = "https://files.pythonhosted.org/packages/00/39/4f83e517ec173e16a47c6d102cd22a1aaebe80e1208a1f2e83ab9a0e4134/ruff-0.8.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:622b82bf3429ff0e346835ec213aec0a04d9730480cbffbb6ad9372014e31bbd", size = 12967686, upload-time = "2025-01-02T12:03:43.751Z" }, - { url = "https://files.pythonhosted.org/packages/1a/f6/52a2973ff108d74b5da706a573379eea160bece098f7cfa3f35dc4622710/ruff-0.8.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f99be814d77a5dac8a8957104bdd8c359e85c86b0ee0e38dca447cb1095f70fb", size = 11253788, upload-time = "2025-01-02T12:03:48.222Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/1f/3b30f3c65b1303cb8e268ec3b046b77ab21ed8e26921cfc7e8232aa57f2c/ruff-0.8.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c01c048f9c3385e0fd7822ad0fd519afb282af9cf1778f3580e540629df89725", size = 10860360, upload-time = "2025-01-02T12:03:51.34Z" }, - { url = "https://files.pythonhosted.org/packages/a5/a8/2a3ea6bacead963f7aeeba0c61815d9b27b0d638e6a74984aa5cc5d27733/ruff-0.8.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7512e8cb038db7f5db6aae0e24735ff9ea03bb0ed6ae2ce534e9baa23c1dc9ea", size = 10457922, upload-time = "2025-01-02T12:03:55.212Z" }, - { url = "https://files.pythonhosted.org/packages/17/47/8f9514b670969aab57c5fc826fb500a16aee8feac1bcf8a91358f153a5ba/ruff-0.8.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:762f113232acd5b768d6b875d16aad6b00082add40ec91c927f0673a8ec4ede8", size = 10958347, upload-time = "2025-01-02T12:03:59.214Z" }, - { url = "https://files.pythonhosted.org/packages/0d/d6/78a9af8209ad99541816d74f01ce678fc01ebb3f37dd7ab8966646dcd92b/ruff-0.8.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:03a90200c5dfff49e4c967b405f27fdfa81594cbb7c5ff5609e42d7fe9680da5", size = 11328882, upload-time = "2025-01-02T12:04:02.224Z" }, - { url = "https://files.pythonhosted.org/packages/54/77/5c8072ec7afdfdf42c7a4019044486a2b6c85ee73617f8875ec94b977fed/ruff-0.8.5-py3-none-win32.whl", hash = "sha256:8710ffd57bdaa6690cbf6ecff19884b8629ec2a2a2a2f783aa94b1cc795139ed", size = 8802515, upload-time = "2025-01-02T12:04:05.399Z" }, - { url = "https://files.pythonhosted.org/packages/bc/b6/47d2b06784de8ae992c45cceb2a30f3f205b3236a629d7ca4c0c134839a2/ruff-0.8.5-py3-none-win_amd64.whl", hash = "sha256:4020d8bf8d3a32325c77af452a9976a9ad6455773bcb94991cf15bd66b347e47", size = 9684231, upload-time = "2025-01-02T12:04:08.414Z" }, - { url = "https://files.pythonhosted.org/packages/bf/5e/ffee22bf9f9e4b2669d1f0179ae8804584939fb6502b51f2401e26b1e028/ruff-0.8.5-py3-none-win_arm64.whl", hash = "sha256:134ae019ef13e1b060ab7136e7828a6d83ea727ba123381307eb37c6bd5e01cb", size = 9124741, upload-time = "2025-01-02T12:04:11.189Z" }, +dependencies = [ + { name = "numpy" }, ] - -[[package]] -name = "safetensors" -version = "0.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5d/b3/1d9000e9d0470499d124ca63c6908f8092b528b48bd95ba11507e14d9dba/safetensors-0.5.0.tar.gz", hash = "sha256:c47b34c549fa1e0c655c4644da31332c61332c732c47c8dd9399347e9aac69d1", size = 65660, upload-time = "2025-01-02T16:39:36.705Z" } +sdist = { url = "https://files.pythonhosted.org/packages/62/11/4d44a1f274e002784e4dbdb81e0ea96d2de2d1045b2132d5af62cc31fd28/scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417", size = 58620554 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/ee/0fd61b99bc58db736a3ab3d97d49d4a11afe71ee0aad85b25d6c4235b743/safetensors-0.5.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c683b9b485bee43422ba2855f72777c37647190281e03da4c8d2a69fa5336558", size = 426509, upload-time = "2025-01-02T16:39:20.469Z" }, - { url = "https://files.pythonhosted.org/packages/51/aa/de1a11aa056d0241f95d5de9dbb1ac2dabaf3df5c568f9375451fd593c95/safetensors-0.5.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6106aa835deb7263f7014f74c05842ab828d6c11d789f2e7e98f26b1a305e72d", size = 408471, upload-time = "2025-01-02T16:39:19.184Z" }, - { url = 
"https://files.pythonhosted.org/packages/a5/c7/84b821bd90547a909053a8526ff70446f062287cda20d0ec024c1a1f80f6/safetensors-0.5.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1349611f74f55c5ee1c1c144c536a2743c38f7d8bf60b9fc8267e0efc0591a2", size = 449638, upload-time = "2025-01-02T16:39:05.345Z" }, - { url = "https://files.pythonhosted.org/packages/b5/25/3d20bb9f669fec704e01d70849e9c6c054601efe9b5e784ce9a865cf3c52/safetensors-0.5.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:56d936028ac799e18644b08a91fd98b4b62ae3dcd0440b1cfcb56535785589f1", size = 458246, upload-time = "2025-01-02T16:39:07.34Z" }, - { url = "https://files.pythonhosted.org/packages/31/35/68e1c39c4ad6a2f9373fc89588c0fbd29b1899c57c3a6482fc8e42fa4c8f/safetensors-0.5.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f26afada2233576ffea6b80042c2c0a8105c164254af56168ec14299ad3122", size = 509573, upload-time = "2025-01-02T16:39:08.799Z" }, - { url = "https://files.pythonhosted.org/packages/85/b0/79927c6d4f70232f04a46785ea8b0ed0f70f9be74d17e0a90e1890523553/safetensors-0.5.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20067e7a5e63f0cbc88457b2a1161e70ff73af4cc3a24bce90309430cd6f6e7e", size = 525555, upload-time = "2025-01-02T16:39:11.439Z" }, - { url = "https://files.pythonhosted.org/packages/a6/83/ca8c1af662a20a545c174b8949e63865b747c180b607260eed83c1d38c72/safetensors-0.5.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649d6a4aa34d5174ae87289068ccc2fec2a1a998ecf83425aa5a42c3eff69bcf", size = 461294, upload-time = "2025-01-02T16:39:15.946Z" }, - { url = "https://files.pythonhosted.org/packages/81/ef/1d11d08b14b36e3e3d701629c9685ad95c3afee7da2851658d6c65cad9be/safetensors-0.5.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:debff88f41d569a3e93a955469f83864e432af35bb34b16f65a9ddf378daa3ae", size = 490593, upload-time = "2025-01-02T16:39:13.144Z" }, - { url = "https://files.pythonhosted.org/packages/f6/9a/50bf824a26d768d33485b7208ba5e6a173a80a2633be5e213a2494d1569b/safetensors-0.5.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:bdf6a3e366ea8ba1a0538db6099229e95811194432c684ea28ea7ae28763b8dc", size = 628142, upload-time = "2025-01-02T16:39:23.507Z" }, - { url = "https://files.pythonhosted.org/packages/28/22/dc5ae22523b8221017dbf6984fedfe2c6f35ff4cc76e80bbab2b9e14cc8a/safetensors-0.5.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:0371afd84c200a80eb7103bf715108b0c3846132fb82453ae018609a15551580", size = 721377, upload-time = "2025-01-02T16:39:26.055Z" }, - { url = "https://files.pythonhosted.org/packages/fe/87/36323e8058e7101ef0101fde6d71c375a9ab6059d3d9501fe8fb8d13a45a/safetensors-0.5.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:5ec7fc8c3d2f32ebf1c7011bc886b362e53ee0a1ec6d828c39d531fed8b325d6", size = 659192, upload-time = "2025-01-02T16:39:29.67Z" }, - { url = "https://files.pythonhosted.org/packages/dd/2f/8d526f06bb192b45b4e0fec94284d568497e6e19620c834373749a5f9787/safetensors-0.5.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:53715e4ea0ef23c08f004baae0f609a7773de7d4148727760417c6760cfd6b76", size = 632231, upload-time = "2025-01-02T16:39:34.136Z" }, - { url = "https://files.pythonhosted.org/packages/d3/68/1166bba02f77c811d17766e54a54d7714c1276f54bfcf60d50bb9326a1b4/safetensors-0.5.0-cp38-abi3-win32.whl", hash = "sha256:b85565bc2f0456961a788d2f11d9d892eec46603db0e4923aa9512c2355aa727", size = 290608, upload-time = "2025-01-02T16:39:42.313Z" }, - 
{ url = "https://files.pythonhosted.org/packages/0c/ab/a428973e43a77791d2fd4b6425f4fd82e9f8559b32222c861acbbd7bc910/safetensors-0.5.0-cp38-abi3-win_amd64.whl", hash = "sha256:f451941f8aa11e7be5c3fa450e264609a2b1e65fa38ae590a74e55a94d646b76", size = 303322, upload-time = "2025-01-02T16:39:41.011Z" }, + { url = "https://files.pythonhosted.org/packages/50/ef/ac98346db016ff18a6ad7626a35808f37074d25796fd0234c2bb0ed1e054/scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79", size = 39091068 }, + { url = "https://files.pythonhosted.org/packages/b9/cc/70948fe9f393b911b4251e96b55bbdeaa8cca41f37c26fd1df0232933b9e/scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e", size = 29875417 }, + { url = "https://files.pythonhosted.org/packages/3b/2e/35f549b7d231c1c9f9639f9ef49b815d816bf54dd050da5da1c11517a218/scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73", size = 23084508 }, + { url = "https://files.pythonhosted.org/packages/3f/d6/b028e3f3e59fae61fb8c0f450db732c43dd1d836223a589a8be9f6377203/scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e", size = 25503364 }, + { url = "https://files.pythonhosted.org/packages/a7/2f/6c142b352ac15967744d62b165537a965e95d557085db4beab2a11f7943b/scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d", size = 35292639 }, + { url = "https://files.pythonhosted.org/packages/56/46/2449e6e51e0d7c3575f289f6acb7f828938eaab8874dbccfeb0cd2b71a27/scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e", size = 40798288 }, + { url = "https://files.pythonhosted.org/packages/32/cd/9d86f7ed7f4497c9fd3e39f8918dd93d9f647ba80d7e34e4946c0c2d1a7c/scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06", size = 42524647 }, + { url = "https://files.pythonhosted.org/packages/f5/1b/6ee032251bf4cdb0cc50059374e86a9f076308c1512b61c4e003e241efb7/scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84", size = 44469524 }, ] [[package]] -name = "scipy" -version = "1.15.0" +name = "seaborn" +version = "0.13.2" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "matplotlib" }, { name = "numpy" }, + { name = "pandas" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/7b/2b8ac283cf32465ed08bc20a83d559fe7b174a484781702ba8accea001d6/scipy-1.15.0.tar.gz", hash = "sha256:300742e2cc94e36a2880ebe464a1c8b4352a7b0f3e36ec3d2ac006cdbe0219ac", size = 59407226, upload-time = "2025-01-03T14:38:02.164Z" } +sdist = { url = "https://files.pythonhosted.org/packages/86/59/a451d7420a77ab0b98f7affa3a1d78a313d2f7281a57afb1a34bae8ab412/seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7", size = 1457696 } wheels = [ - { url = "https://files.pythonhosted.org/packages/35/70/fffb90a725dec6056c9059073856fd99de22a253459a874a63b8b8a012db/scipy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5beb0a2200372b7416ec73fdae94fe81a6e85e44eb49c35a11ac356d2b8eccc6", size = 41475240, upload-time = 
"2025-01-03T14:32:46.815Z" }, - { url = "https://files.pythonhosted.org/packages/63/ca/6b838a2e5e6718d879e8522d1155a068c2a769be04f7da8c5179ead32a7b/scipy-1.15.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fde0f3104dfa1dfbc1f230f65506532d0558d43188789eaf68f97e106249a913", size = 32595923, upload-time = "2025-01-03T14:32:53Z" }, - { url = "https://files.pythonhosted.org/packages/b1/07/4e69f6f7185915d77719bf226c1d554a4bb99f27cb92161fdd57b1434343/scipy-1.15.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:35c68f7044b4e7ad73a3e68e513dda946989e523df9b062bd3cf401a1a882192", size = 24869617, upload-time = "2025-01-03T14:32:58.9Z" }, - { url = "https://files.pythonhosted.org/packages/30/22/e3dadf189dcab215be461efe0fd9d288f4c2d99783c4aec2ce80837800b7/scipy-1.15.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:52475011be29dfcbecc3dfe3060e471ac5155d72e9233e8d5616b84e2b542054", size = 28007674, upload-time = "2025-01-03T14:33:04.175Z" }, - { url = "https://files.pythonhosted.org/packages/51/0f/71c9ee2acaac0660a79e36424d367ed5737e4ef27b885f96cd439f451467/scipy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5972e3f96f7dda4fd3bb85906a17338e65eaddfe47f750e240f22b331c08858e", size = 38066684, upload-time = "2025-01-03T14:33:11.815Z" }, - { url = "https://files.pythonhosted.org/packages/fb/77/74a1ceecb205f5d46fe2cd10071383748ee8891a96b7824a372391a6291c/scipy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe00169cf875bed0b3c40e4da45b57037dc21d7c7bf0c85ed75f210c281488f1", size = 40250011, upload-time = "2025-01-03T14:33:18.729Z" }, - { url = "https://files.pythonhosted.org/packages/8c/9f/f1544110a3d31183034e05422836505beb438aa56183f2ccef6dcd3b4e3f/scipy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:161f80a98047c219c257bf5ce1777c574bde36b9d962a46b20d0d7e531f86863", size = 42625471, upload-time = "2025-01-03T14:33:27.97Z" }, - { url = "https://files.pythonhosted.org/packages/3f/39/a29b75f9c30084cbafd416bfa00933311a5b7a96be6e88750c98521d2ccb/scipy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:327163ad73e54541a675240708244644294cb0a65cca420c9c79baeb9648e479", size = 43622832, upload-time = "2025-01-03T14:33:36.803Z" }, + { url = "https://files.pythonhosted.org/packages/83/11/00d3c3dfc25ad54e731d91449895a79e4bf2384dc3ac01809010ba88f6d5/seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987", size = 294914 }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, ] [[package]] @@ -1244,9 +1044,9 @@ dependencies = [ { name = "executing" }, { name = "pure-eval" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 }, ] [[package]] @@ -1260,75 +1060,41 @@ dependencies = [ { name = "patsy" }, { name = "scipy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/3b/963a015dd8ea17e10c7b0e2f14d7c4daec903baf60a017e756b57953a4bf/statsmodels-0.14.4.tar.gz", hash = "sha256:5d69e0f39060dc72c067f9bb6e8033b6dccdb0bae101d76a7ef0bcc94e898b67", size = 20354802, upload-time = "2024-10-03T16:15:36.273Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/99/654fd41a9024643ee70b239e5ebc987bf98ce9fc2693bd550bee58136564/statsmodels-0.14.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5221dba7424cf4f2561b22e9081de85f5bb871228581124a0d1b572708545199", size = 10220508, upload-time = "2024-10-03T17:10:31.183Z" }, - { url = "https://files.pythonhosted.org/packages/67/d8/ac30cf4cf97adaa48548be57e7cf02e894f31b45fd55bf9213358d9781c9/statsmodels-0.14.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:17672b30c6b98afe2b095591e32d1d66d4372f2651428e433f16a3667f19eabb", size = 9912317, upload-time = "2024-10-03T16:22:29.504Z" }, - { url = "https://files.pythonhosted.org/packages/e0/77/2440d551eaf27f9c1d3650e13b3821a35ad5b21d3a19f62fb302af9203e8/statsmodels-0.14.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab5e6312213b8cfb9dca93dd46a0f4dccb856541f91d3306227c3d92f7659245", size = 10301662, upload-time = "2024-10-03T17:13:04.537Z" }, - { url = "https://files.pythonhosted.org/packages/fa/e1/60a652f18996a40a7410aeb7eb476c18da8a39792c7effe67f06883e9852/statsmodels-0.14.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbb150620b53133d6cd1c5d14c28a4f85701e6c781d9b689b53681effaa655f", size = 10741763, upload-time = "2024-10-03T17:13:17.594Z" }, - { url = "https://files.pythonhosted.org/packages/81/0c/2453eec3ac25e300847d9ed97f41156de145e507391ecb5ac989e111e525/statsmodels-0.14.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb695c2025d122a101c2aca66d2b78813c321b60d3a7c86bb8ec4467bb53b0f9", size = 10879534, upload-time = "2024-10-03T17:13:31.19Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/9a/e466a1b887a1441141e52dbcc98152f013d85076576da6eed2357f2016ae/statsmodels-0.14.4-cp312-cp312-win_amd64.whl", hash = "sha256:7f7917a51766b4e074da283c507a25048ad29a18e527207883d73535e0dc6184", size = 9823866, upload-time = "2024-10-03T16:14:23.828Z" }, -] - -[[package]] -name = "tabulate" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, -] - -[[package]] -name = "tokenizers" -version = "0.21.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "huggingface-hub" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/20/41/c2be10975ca37f6ec40d7abd7e98a5213bb04f284b869c1a24e6504fd94d/tokenizers-0.21.0.tar.gz", hash = "sha256:ee0894bf311b75b0c03079f33859ae4b2334d675d4e93f5a4132e1eae2834fe4", size = 343021, upload-time = "2024-11-27T13:11:23.89Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/3b/963a015dd8ea17e10c7b0e2f14d7c4daec903baf60a017e756b57953a4bf/statsmodels-0.14.4.tar.gz", hash = "sha256:5d69e0f39060dc72c067f9bb6e8033b6dccdb0bae101d76a7ef0bcc94e898b67", size = 20354802 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/5c/8b09607b37e996dc47e70d6a7b6f4bdd4e4d5ab22fe49d7374565c7fefaf/tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2", size = 2647461, upload-time = "2024-11-27T13:11:07.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/7a/88e58bb297c22633ed1c9d16029316e5b5ac5ee44012164c2edede599a5e/tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e", size = 2563639, upload-time = "2024-11-27T13:11:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/f7/14/83429177c19364df27d22bc096d4c2e431e0ba43e56c525434f1f9b0fd00/tokenizers-0.21.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b177fb54c4702ef611de0c069d9169f0004233890e0c4c5bd5508ae05abf193", size = 2903304, upload-time = "2024-11-27T13:10:51.315Z" }, - { url = "https://files.pythonhosted.org/packages/7e/db/3433eab42347e0dc5452d8fcc8da03f638c9accffefe5a7c78146666964a/tokenizers-0.21.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b43779a269f4629bebb114e19c3fca0223296ae9fea8bb9a7a6c6fb0657ff8e", size = 2804378, upload-time = "2024-11-27T13:10:53.513Z" }, - { url = "https://files.pythonhosted.org/packages/57/8b/7da5e6f89736c2ade02816b4733983fca1c226b0c42980b1ae9dc8fcf5cc/tokenizers-0.21.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aeb255802be90acfd363626753fda0064a8df06031012fe7d52fd9a905eb00e", size = 3095488, upload-time = "2024-11-27T13:11:00.662Z" }, - { url = "https://files.pythonhosted.org/packages/4d/f6/5ed6711093dc2c04a4e03f6461798b12669bc5a17c8be7cce1240e0b5ce8/tokenizers-0.21.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d8b09dbeb7a8d73ee204a70f94fc06ea0f17dcf0844f16102b9f414f0b7463ba", size = 3121410, upload-time = "2024-11-27T13:10:55.674Z" }, - { url = "https://files.pythonhosted.org/packages/81/42/07600892d48950c5e80505b81411044a2d969368cdc0d929b1c847bf6697/tokenizers-0.21.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:400832c0904f77ce87c40f1a8a27493071282f785724ae62144324f171377273", size = 3388821, upload-time = "2024-11-27T13:10:58.401Z" }, - { url = "https://files.pythonhosted.org/packages/22/06/69d7ce374747edaf1695a4f61b83570d91cc8bbfc51ccfecf76f56ab4aac/tokenizers-0.21.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84ca973b3a96894d1707e189c14a774b701596d579ffc7e69debfc036a61a04", size = 3008868, upload-time = "2024-11-27T13:11:03.734Z" }, - { url = "https://files.pythonhosted.org/packages/c8/69/54a0aee4d576045b49a0eb8bffdc495634309c823bf886042e6f46b80058/tokenizers-0.21.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:eb7202d231b273c34ec67767378cd04c767e967fda12d4a9e36208a34e2f137e", size = 8975831, upload-time = "2024-11-27T13:11:10.32Z" }, - { url = "https://files.pythonhosted.org/packages/f7/f3/b776061e4f3ebf2905ba1a25d90380aafd10c02d406437a8ba22d1724d76/tokenizers-0.21.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:089d56db6782a73a27fd8abf3ba21779f5b85d4a9f35e3b493c7bbcbbf0d539b", size = 8920746, upload-time = "2024-11-27T13:11:13.238Z" }, - { url = "https://files.pythonhosted.org/packages/d8/ee/ce83d5ec8b6844ad4c3ecfe3333d58ecc1adc61f0878b323a15355bcab24/tokenizers-0.21.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:c87ca3dc48b9b1222d984b6b7490355a6fdb411a2d810f6f05977258400ddb74", size = 9161814, upload-time = "2024-11-27T13:11:16.675Z" }, - { url = "https://files.pythonhosted.org/packages/18/07/3e88e65c0ed28fa93aa0c4d264988428eef3df2764c3126dc83e243cb36f/tokenizers-0.21.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4145505a973116f91bc3ac45988a92e618a6f83eb458f49ea0790df94ee243ff", size = 9357138, upload-time = "2024-11-27T13:11:20.09Z" }, - { url = "https://files.pythonhosted.org/packages/15/b0/dc4572ca61555fc482ebc933f26cb407c6aceb3dc19c301c68184f8cad03/tokenizers-0.21.0-cp39-abi3-win32.whl", hash = "sha256:eb1702c2f27d25d9dd5b389cc1f2f51813e99f8ca30d9e25348db6585a97e24a", size = 2202266, upload-time = "2024-11-27T13:11:28.784Z" }, - { url = "https://files.pythonhosted.org/packages/44/69/d21eb253fa91622da25585d362a874fa4710be600f0ea9446d8d0217cec1/tokenizers-0.21.0-cp39-abi3-win_amd64.whl", hash = "sha256:87841da5a25a3a5f70c102de371db120f41873b854ba65e52bccd57df5a3780c", size = 2389192, upload-time = "2024-11-27T13:11:25.724Z" }, + { url = "https://files.pythonhosted.org/packages/31/f8/2662e6a101315ad336f75168fa9bac71f913ebcb92a6be84031d84a0f21f/statsmodels-0.14.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5a24f5d2c22852d807d2b42daf3a61740820b28d8381daaf59dcb7055bf1a79", size = 10186886 }, + { url = "https://files.pythonhosted.org/packages/fa/c0/ee6e8ed35fc1ca9c7538c592f4974547bf72274bc98db1ae4a6e87481a83/statsmodels-0.14.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df4f7864606fa843d7e7c0e6af288f034a2160dba14e6ccc09020a3cf67cb092", size = 9880066 }, + { url = "https://files.pythonhosted.org/packages/d1/97/3380ca6d8fd66cfb3d12941e472642f26e781a311c355a4e97aab2ed0216/statsmodels-0.14.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91341cbde9e8bea5fb419a76e09114e221567d03f34ca26e6d67ae2c27d8fe3c", size = 10283521 }, + { url = 
"https://files.pythonhosted.org/packages/fe/2a/55c5b5c5e5124a202ea3fe0bcdbdeceaf91b4ec6164b8434acb9dd97409c/statsmodels-0.14.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1322286a7bfdde2790bf72d29698a1b76c20b8423a55bdcd0d457969d0041f72", size = 10723228 }, + { url = "https://files.pythonhosted.org/packages/4f/76/67747e49dc758daae06f33aad8247b718cd7d224f091d2cd552681215bb2/statsmodels-0.14.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e31b95ac603415887c9f0d344cb523889cf779bc52d68e27e2d23c358958fec7", size = 10859503 }, + { url = "https://files.pythonhosted.org/packages/1d/eb/cb8b01f5edf8f135eb3d0553d159db113a35b2948d0e51eeb735e7ae09ea/statsmodels-0.14.4-cp313-cp313-win_amd64.whl", hash = "sha256:81030108d27aecc7995cac05aa280cf8c6025f6a6119894eef648997936c2dd0", size = 9817574 }, ] [[package]] -name = "tomlkit" -version = "0.13.2" +name = "toml" +version = "0.10.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885, upload-time = "2024-08-14T08:19:41.488Z" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955, upload-time = "2024-08-14T08:19:40.05Z" }, + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, ] [[package]] name = "tornado" version = "6.4.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/59/45/a0daf161f7d6f36c3ea5fc0c2de619746cc3dd4c76402e9db545bd920f63/tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b", size = 501135, upload-time = "2024-11-22T03:06:38.036Z" } +sdist = { url = "https://files.pythonhosted.org/packages/59/45/a0daf161f7d6f36c3ea5fc0c2de619746cc3dd4c76402e9db545bd920f63/tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b", size = 501135 } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/7e/71f604d8cea1b58f82ba3590290b66da1e72d840aeb37e0d5f7291bd30db/tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1", size = 436299, upload-time = "2024-11-22T03:06:20.162Z" }, - { url = "https://files.pythonhosted.org/packages/96/44/87543a3b99016d0bf54fdaab30d24bf0af2e848f1d13d34a3a5380aabe16/tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803", size = 434253, upload-time = "2024-11-22T03:06:22.39Z" }, - { url = "https://files.pythonhosted.org/packages/cb/fb/fdf679b4ce51bcb7210801ef4f11fdac96e9885daa402861751353beea6e/tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec", size = 437602, upload-time = "2024-11-22T03:06:24.214Z" }, - { url = "https://files.pythonhosted.org/packages/4f/3b/e31aeffffc22b475a64dbeb273026a21b5b566f74dee48742817626c47dc/tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946", size = 436972, upload-time = "2024-11-22T03:06:25.559Z" }, - { url = "https://files.pythonhosted.org/packages/22/55/b78a464de78051a30599ceb6983b01d8f732e6f69bf37b4ed07f642ac0fc/tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf", size = 437173, upload-time = "2024-11-22T03:06:27.584Z" }, - { url = "https://files.pythonhosted.org/packages/79/5e/be4fb0d1684eb822c9a62fb18a3e44a06188f78aa466b2ad991d2ee31104/tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634", size = 437892, upload-time = "2024-11-22T03:06:28.933Z" }, - { url = "https://files.pythonhosted.org/packages/f5/33/4f91fdd94ea36e1d796147003b490fe60a0215ac5737b6f9c65e160d4fe0/tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73", size = 437334, upload-time = "2024-11-22T03:06:30.428Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ae/c1b22d4524b0e10da2f29a176fb2890386f7bd1f63aacf186444873a88a0/tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c", size = 437261, upload-time = "2024-11-22T03:06:32.458Z" }, - { url = "https://files.pythonhosted.org/packages/b5/25/36dbd49ab6d179bcfc4c6c093a51795a4f3bed380543a8242ac3517a1751/tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482", size = 438463, upload-time = "2024-11-22T03:06:34.71Z" }, - { url = "https://files.pythonhosted.org/packages/61/cc/58b1adeb1bb46228442081e746fcdbc4540905c87e8add7c277540934edb/tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38", size = 438907, upload-time = "2024-11-22T03:06:36.71Z" }, + { url = "https://files.pythonhosted.org/packages/26/7e/71f604d8cea1b58f82ba3590290b66da1e72d840aeb37e0d5f7291bd30db/tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1", size = 436299 }, + { url = "https://files.pythonhosted.org/packages/96/44/87543a3b99016d0bf54fdaab30d24bf0af2e848f1d13d34a3a5380aabe16/tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803", size = 434253 }, + { url = "https://files.pythonhosted.org/packages/cb/fb/fdf679b4ce51bcb7210801ef4f11fdac96e9885daa402861751353beea6e/tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec", size = 437602 }, + { url = "https://files.pythonhosted.org/packages/4f/3b/e31aeffffc22b475a64dbeb273026a21b5b566f74dee48742817626c47dc/tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946", size = 436972 }, + { url = 
"https://files.pythonhosted.org/packages/22/55/b78a464de78051a30599ceb6983b01d8f732e6f69bf37b4ed07f642ac0fc/tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf", size = 437173 }, + { url = "https://files.pythonhosted.org/packages/79/5e/be4fb0d1684eb822c9a62fb18a3e44a06188f78aa466b2ad991d2ee31104/tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634", size = 437892 }, + { url = "https://files.pythonhosted.org/packages/f5/33/4f91fdd94ea36e1d796147003b490fe60a0215ac5737b6f9c65e160d4fe0/tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73", size = 437334 }, + { url = "https://files.pythonhosted.org/packages/2b/ae/c1b22d4524b0e10da2f29a176fb2890386f7bd1f63aacf186444873a88a0/tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c", size = 437261 }, + { url = "https://files.pythonhosted.org/packages/b5/25/36dbd49ab6d179bcfc4c6c093a51795a4f3bed380543a8242ac3517a1751/tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482", size = 438463 }, + { url = "https://files.pythonhosted.org/packages/61/cc/58b1adeb1bb46228442081e746fcdbc4540905c87e8add7c277540934edb/tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38", size = 438907 }, ] [[package]] @@ -1336,100 +1102,79 @@ name = "tqdm" version = "4.67.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "colorama", marker = "platform_system == 'Windows'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, ] [[package]] name = "traitlets" version = "5.14.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = 
"sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, -] - -[[package]] -name = "transformers" -version = "4.47.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "filelock" }, - { name = "huggingface-hub" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "pyyaml" }, - { name = "regex" }, - { name = "requests" }, - { name = "safetensors" }, - { name = "tokenizers" }, - { name = "tqdm" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/15/1a/936aeb4f88112f670b604f5748034568dbc2b9bbb457a8d4518b1a15510a/transformers-4.47.1.tar.gz", hash = "sha256:6c29c05a5f595e278481166539202bf8641281536df1c42357ee58a45d0a564a", size = 8707421, upload-time = "2024-12-17T15:42:41.653Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/3a/8bdab26e09c5a242182b7ba9152e216d5ab4ae2d78c4298eb4872549cd35/transformers-4.47.1-py3-none-any.whl", hash = "sha256:d2f5d19bb6283cd66c893ec7e6d931d6370bbf1cc93633326ff1f41a40046c9c", size = 10133598, upload-time = "2024-12-17T15:42:35.1Z" }, + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, ] [[package]] name = "typing-extensions" version = "4.12.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321, upload-time = "2024-06-07T18:52:15.995Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload-time = "2024-06-07T18:52:13.582Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, ] [[package]] name = "tzdata" version = "2024.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282, upload-time = "2024-09-23T18:56:46.89Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586, upload-time = "2024-09-23T18:56:45.478Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, ] [[package]] name = "urllib3" -version = "2.3.0" +version = "2.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, ] [[package]] name = "wcwidth" version = "0.2.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, ] [[package]] name = "xxhash" version = "3.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241, upload-time = "2024-08-17T09:20:38.972Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hash = 
"sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241 } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/0e/1bfce2502c57d7e2e787600b31c83535af83746885aa1a5f153d8c8059d6/xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00", size = 31969, upload-time = "2024-08-17T09:18:24.025Z" }, - { url = "https://files.pythonhosted.org/packages/3f/d6/8ca450d6fe5b71ce521b4e5db69622383d039e2b253e9b2f24f93265b52c/xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9", size = 30787, upload-time = "2024-08-17T09:18:25.318Z" }, - { url = "https://files.pythonhosted.org/packages/5b/84/de7c89bc6ef63d750159086a6ada6416cc4349eab23f76ab870407178b93/xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84", size = 220959, upload-time = "2024-08-17T09:18:26.518Z" }, - { url = "https://files.pythonhosted.org/packages/fe/86/51258d3e8a8545ff26468c977101964c14d56a8a37f5835bc0082426c672/xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793", size = 200006, upload-time = "2024-08-17T09:18:27.905Z" }, - { url = "https://files.pythonhosted.org/packages/02/0a/96973bd325412feccf23cf3680fd2246aebf4b789122f938d5557c54a6b2/xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be", size = 428326, upload-time = "2024-08-17T09:18:29.335Z" }, - { url = "https://files.pythonhosted.org/packages/11/a7/81dba5010f7e733de88af9555725146fc133be97ce36533867f4c7e75066/xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6", size = 194380, upload-time = "2024-08-17T09:18:30.706Z" }, - { url = "https://files.pythonhosted.org/packages/fb/7d/f29006ab398a173f4501c0e4977ba288f1c621d878ec217b4ff516810c04/xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90", size = 207934, upload-time = "2024-08-17T09:18:32.133Z" }, - { url = "https://files.pythonhosted.org/packages/8a/6e/6e88b8f24612510e73d4d70d9b0c7dff62a2e78451b9f0d042a5462c8d03/xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27", size = 216301, upload-time = "2024-08-17T09:18:33.474Z" }, - { url = "https://files.pythonhosted.org/packages/af/51/7862f4fa4b75a25c3b4163c8a873f070532fe5f2d3f9b3fc869c8337a398/xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2", size = 203351, upload-time = "2024-08-17T09:18:34.889Z" }, - { url = "https://files.pythonhosted.org/packages/22/61/8d6a40f288f791cf79ed5bb113159abf0c81d6efb86e734334f698eb4c59/xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d", size = 210294, upload-time = "2024-08-17T09:18:36.355Z" }, - { url = "https://files.pythonhosted.org/packages/17/02/215c4698955762d45a8158117190261b2dbefe9ae7e5b906768c09d8bc74/xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab", size = 414674, upload-time = "2024-08-17T09:18:38.536Z" }, - { url = "https://files.pythonhosted.org/packages/31/5c/b7a8db8a3237cff3d535261325d95de509f6a8ae439a5a7a4ffcff478189/xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e", size = 192022, upload-time = "2024-08-17T09:18:40.138Z" }, - { url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8", size = 30170, upload-time = "2024-08-17T09:18:42.163Z" }, - { url = "https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e", size = 30040, upload-time = "2024-08-17T09:18:43.699Z" }, - { url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2", size = 26796, upload-time = "2024-08-17T09:18:45.29Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b8/e4b3ad92d249be5c83fa72916c9091b0965cb0faeff05d9a0a3870ae6bff/xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6", size = 31795 }, + { url = "https://files.pythonhosted.org/packages/fc/d8/b3627a0aebfbfa4c12a41e22af3742cf08c8ea84f5cc3367b5de2d039cce/xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5", size = 30792 }, + { url = "https://files.pythonhosted.org/packages/c3/cc/762312960691da989c7cd0545cb120ba2a4148741c6ba458aa723c00a3f8/xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc", size = 220950 }, + { url = "https://files.pythonhosted.org/packages/fe/e9/cc266f1042c3c13750e86a535496b58beb12bf8c50a915c336136f6168dc/xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3", size = 199980 }, + { url = "https://files.pythonhosted.org/packages/bf/85/a836cd0dc5cc20376de26b346858d0ac9656f8f730998ca4324921a010b9/xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c", size = 428324 }, + { url = "https://files.pythonhosted.org/packages/b4/0e/15c243775342ce840b9ba34aceace06a1148fa1630cd8ca269e3223987f5/xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb", size = 194370 }, + { url = "https://files.pythonhosted.org/packages/87/a1/b028bb02636dfdc190da01951d0703b3d904301ed0ef6094d948983bef0e/xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f", size = 207911 }, + { url = "https://files.pythonhosted.org/packages/80/d5/73c73b03fc0ac73dacf069fdf6036c9abad82de0a47549e9912c955ab449/xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7", size = 216352 }, + { url = "https://files.pythonhosted.org/packages/b6/2a/5043dba5ddbe35b4fe6ea0a111280ad9c3d4ba477dd0f2d1fe1129bda9d0/xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326", size = 203410 }, + { url = "https://files.pythonhosted.org/packages/a2/b2/9a8ded888b7b190aed75b484eb5c853ddd48aa2896e7b59bbfbce442f0a1/xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf", size = 210322 }, + { url = "https://files.pythonhosted.org/packages/98/62/440083fafbc917bf3e4b67c2ade621920dd905517e85631c10aac955c1d2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7", size = 414725 }, + { url = "https://files.pythonhosted.org/packages/75/db/009206f7076ad60a517e016bb0058381d96a007ce3f79fa91d3010f49cc2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c", size = 192070 }, + { url = "https://files.pythonhosted.org/packages/1f/6d/c61e0668943a034abc3a569cdc5aeae37d686d9da7e39cf2ed621d533e36/xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637", size = 30172 }, + { url = "https://files.pythonhosted.org/packages/96/14/8416dce965f35e3d24722cdf79361ae154fa23e2ab730e5323aa98d7919e/xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43", size = 30041 }, + { url = "https://files.pythonhosted.org/packages/27/ee/518b72faa2073f5aa8e3262408d284892cb79cf2754ba0c3a5870645ef73/xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b", size = 26801 }, ] [[package]] @@ -1441,23 +1186,23 @@ dependencies = [ { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062, upload-time = "2024-12-01T20:35:23.292Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644, upload-time = "2024-12-01T20:33:39.204Z" }, - { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962, upload-time = "2024-12-01T20:33:40.808Z" }, - { url = "https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795, upload-time = "2024-12-01T20:33:42.322Z" }, - { url = "https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368, 
upload-time = "2024-12-01T20:33:43.956Z" }, - { url = "https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314, upload-time = "2024-12-01T20:33:46.046Z" }, - { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987, upload-time = "2024-12-01T20:33:48.352Z" }, - { url = "https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914, upload-time = "2024-12-01T20:33:50.875Z" }, - { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765, upload-time = "2024-12-01T20:33:52.641Z" }, - { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444, upload-time = "2024-12-01T20:33:54.395Z" }, - { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760, upload-time = "2024-12-01T20:33:56.286Z" }, - { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484, upload-time = "2024-12-01T20:33:58.375Z" }, - { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864, upload-time = "2024-12-01T20:34:00.22Z" }, - { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537, upload-time = "2024-12-01T20:34:03.54Z" }, - { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861, upload-time = "2024-12-01T20:34:05.73Z" }, - { url = "https://files.pythonhosted.org/packages/7d/e3/924c3f64b6b3077889df9a1ece1ed8947e7b61b0a933f2ec93041990a677/yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", size = 84097, upload-time = "2024-12-01T20:34:07.664Z" }, - 
{ url = "https://files.pythonhosted.org/packages/34/45/0e055320daaabfc169b21ff6174567b2c910c45617b0d79c68d7ab349b02/yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", size = 90399, upload-time = "2024-12-01T20:34:09.61Z" }, - { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109, upload-time = "2024-12-01T20:35:20.834Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/c7/c790513d5328a8390be8f47be5d52e141f78b66c6c48f48d241ca6bd5265/yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb", size = 140789 }, + { url = "https://files.pythonhosted.org/packages/30/aa/a2f84e93554a578463e2edaaf2300faa61c8701f0898725842c704ba5444/yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa", size = 94144 }, + { url = "https://files.pythonhosted.org/packages/c6/fc/d68d8f83714b221a85ce7866832cba36d7c04a68fa6a960b908c2c84f325/yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782", size = 91974 }, + { url = "https://files.pythonhosted.org/packages/56/4e/d2563d8323a7e9a414b5b25341b3942af5902a2263d36d20fb17c40411e2/yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0", size = 333587 }, + { url = "https://files.pythonhosted.org/packages/25/c9/cfec0bc0cac8d054be223e9f2c7909d3e8442a856af9dbce7e3442a8ec8d/yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482", size = 344386 }, + { url = "https://files.pythonhosted.org/packages/ab/5d/4c532190113b25f1364d25f4c319322e86232d69175b91f27e3ebc2caf9a/yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186", size = 345421 }, + { url = "https://files.pythonhosted.org/packages/23/d1/6cdd1632da013aa6ba18cee4d750d953104a5e7aac44e249d9410a972bf5/yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58", size = 339384 }, + { url = "https://files.pythonhosted.org/packages/9a/c4/6b3c39bec352e441bd30f432cda6ba51681ab19bb8abe023f0d19777aad1/yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53", size = 326689 }, + { url = "https://files.pythonhosted.org/packages/23/30/07fb088f2eefdc0aa4fc1af4e3ca4eb1a3aadd1ce7d866d74c0f124e6a85/yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2", size = 345453 }, + { url = 
"https://files.pythonhosted.org/packages/63/09/d54befb48f9cd8eec43797f624ec37783a0266855f4930a91e3d5c7717f8/yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8", size = 341872 }, + { url = "https://files.pythonhosted.org/packages/91/26/fd0ef9bf29dd906a84b59f0cd1281e65b0c3e08c6aa94b57f7d11f593518/yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1", size = 347497 }, + { url = "https://files.pythonhosted.org/packages/d9/b5/14ac7a256d0511b2ac168d50d4b7d744aea1c1aa20c79f620d1059aab8b2/yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a", size = 359981 }, + { url = "https://files.pythonhosted.org/packages/ca/b3/d493221ad5cbd18bc07e642894030437e405e1413c4236dd5db6e46bcec9/yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10", size = 366229 }, + { url = "https://files.pythonhosted.org/packages/04/56/6a3e2a5d9152c56c346df9b8fb8edd2c8888b1e03f96324d457e5cf06d34/yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8", size = 360383 }, + { url = "https://files.pythonhosted.org/packages/fd/b7/4b3c7c7913a278d445cc6284e59b2e62fa25e72758f888b7a7a39eb8423f/yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d", size = 310152 }, + { url = "https://files.pythonhosted.org/packages/f5/d5/688db678e987c3e0fb17867970700b92603cadf36c56e5fb08f23e822a0c/yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c", size = 315723 }, + { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109 }, ]